about summary refs log tree commit diff
path: root/compiler/src/dotty
diff options
context:
space:
mode:
authorFelix Mulder <felix.mulder@gmail.com>2016-11-02 11:08:28 +0100
committerGuillaume Martres <smarter@ubuntu.com>2016-11-22 01:35:07 +0100
commit8a61ff432543a29234193cd1f7c14abd3f3d31a0 (patch)
treea8147561d307af862c295cfc8100d271063bb0dd /compiler/src/dotty
parent6a455fe6da5ff9c741d91279a2dc6fe2fb1b472f (diff)
downloaddotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.gz
dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.bz2
dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.zip
Move compiler and compiler tests to compiler dir
Diffstat (limited to 'compiler/src/dotty')
-rw-r--r--compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala125
-rw-r--r--compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala42
-rw-r--r--compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala1102
-rw-r--r--compiler/src/dotty/tools/backend/jvm/GenBCode.scala433
-rw-r--r--compiler/src/dotty/tools/backend/jvm/LabelDefs.scala223
-rw-r--r--compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala417
-rw-r--r--compiler/src/dotty/tools/dotc/Bench.scala46
-rw-r--r--compiler/src/dotty/tools/dotc/CompilationUnit.scala24
-rw-r--r--compiler/src/dotty/tools/dotc/Compiler.scala145
-rw-r--r--compiler/src/dotty/tools/dotc/Driver.scala134
-rw-r--r--compiler/src/dotty/tools/dotc/FromTasty.scala107
-rw-r--r--compiler/src/dotty/tools/dotc/Main.scala9
-rw-r--r--compiler/src/dotty/tools/dotc/Resident.scala58
-rw-r--r--compiler/src/dotty/tools/dotc/Run.scala138
-rw-r--r--compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled258
-rw-r--r--compiler/src/dotty/tools/dotc/ast/Desugar.scala1089
-rw-r--r--compiler/src/dotty/tools/dotc/ast/NavigateAST.scala82
-rw-r--r--compiler/src/dotty/tools/dotc/ast/PluggableTransformers.scala105
-rw-r--r--compiler/src/dotty/tools/dotc/ast/Positioned.scala213
-rw-r--r--compiler/src/dotty/tools/dotc/ast/TreeInfo.scala733
-rw-r--r--compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala187
-rw-r--r--compiler/src/dotty/tools/dotc/ast/Trees.scala1295
-rw-r--r--compiler/src/dotty/tools/dotc/ast/tpd.scala952
-rw-r--r--compiler/src/dotty/tools/dotc/ast/untpd.scala562
-rw-r--r--compiler/src/dotty/tools/dotc/config/CompilerCommand.scala128
-rw-r--r--compiler/src/dotty/tools/dotc/config/Config.scala138
-rw-r--r--compiler/src/dotty/tools/dotc/config/JavaPlatform.scala70
-rw-r--r--compiler/src/dotty/tools/dotc/config/OutputDirs.scala116
-rw-r--r--compiler/src/dotty/tools/dotc/config/PathResolver.scala281
-rw-r--r--compiler/src/dotty/tools/dotc/config/Platform.scala39
-rw-r--r--compiler/src/dotty/tools/dotc/config/Printers.scala34
-rw-r--r--compiler/src/dotty/tools/dotc/config/Properties.scala165
-rw-r--r--compiler/src/dotty/tools/dotc/config/ScalaSettings.scala267
-rw-r--r--compiler/src/dotty/tools/dotc/config/ScalaVersion.scala184
-rw-r--r--compiler/src/dotty/tools/dotc/config/Settings.scala270
-rw-r--r--compiler/src/dotty/tools/dotc/config/WrappedProperties.scala34
-rw-r--r--compiler/src/dotty/tools/dotc/core/Annotations.scala162
-rw-r--r--compiler/src/dotty/tools/dotc/core/CheckRealizable.scala132
-rw-r--r--compiler/src/dotty/tools/dotc/core/Comments.scala459
-rw-r--r--compiler/src/dotty/tools/dotc/core/Constants.scala235
-rw-r--r--compiler/src/dotty/tools/dotc/core/Constraint.scala154
-rw-r--r--compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala458
-rw-r--r--compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala17
-rw-r--r--compiler/src/dotty/tools/dotc/core/Contexts.scala709
-rw-r--r--compiler/src/dotty/tools/dotc/core/Decorators.scala185
-rw-r--r--compiler/src/dotty/tools/dotc/core/Definitions.scala807
-rw-r--r--compiler/src/dotty/tools/dotc/core/DenotTransformers.scala78
-rw-r--r--compiler/src/dotty/tools/dotc/core/Denotations.scala1217
-rw-r--r--compiler/src/dotty/tools/dotc/core/Flags.scala640
-rw-r--r--compiler/src/dotty/tools/dotc/core/Hashable.scala103
-rw-r--r--compiler/src/dotty/tools/dotc/core/Mode.scala89
-rw-r--r--compiler/src/dotty/tools/dotc/core/NameOps.scala432
-rw-r--r--compiler/src/dotty/tools/dotc/core/Names.scala372
-rw-r--r--compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala636
-rw-r--r--compiler/src/dotty/tools/dotc/core/Periods.scala159
-rw-r--r--compiler/src/dotty/tools/dotc/core/Phases.scala377
-rw-r--r--compiler/src/dotty/tools/dotc/core/Scopes.scala437
-rw-r--r--compiler/src/dotty/tools/dotc/core/Signature.scala103
-rw-r--r--compiler/src/dotty/tools/dotc/core/StdNames.scala844
-rw-r--r--compiler/src/dotty/tools/dotc/core/Substituters.scala306
-rw-r--r--compiler/src/dotty/tools/dotc/core/SymDenotations.scala2004
-rw-r--r--compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala267
-rw-r--r--compiler/src/dotty/tools/dotc/core/Symbols.scala602
-rw-r--r--compiler/src/dotty/tools/dotc/core/TypeApplications.scala688
-rw-r--r--compiler/src/dotty/tools/dotc/core/TypeComparer.scala1502
-rw-r--r--compiler/src/dotty/tools/dotc/core/TypeErasure.scala514
-rw-r--r--compiler/src/dotty/tools/dotc/core/TypeOps.scala554
-rw-r--r--compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala40
-rw-r--r--compiler/src/dotty/tools/dotc/core/TyperState.scala210
-rw-r--r--compiler/src/dotty/tools/dotc/core/Types.overflow66
-rw-r--r--compiler/src/dotty/tools/dotc/core/Types.scala3865
-rw-r--r--compiler/src/dotty/tools/dotc/core/Uniques.scala128
-rw-r--r--compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala88
-rw-r--r--compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala221
-rw-r--r--compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala378
-rw-r--r--compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala1100
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala53
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala101
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala79
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala39
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala188
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala553
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala30
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala71
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala122
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala141
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala95
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala188
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala641
-rw-r--r--compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala1161
-rw-r--r--compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala299
-rw-r--r--compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala1260
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala132
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala898
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala538
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala92
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala257
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala466
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Parsers.scala2309
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Scanners.scala1014
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala145
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala264
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Tokens.scala238
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused535
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Utility.scala170
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/package.scala33
-rw-r--r--compiler/src/dotty/tools/dotc/printing/Formatting.scala258
-rw-r--r--compiler/src/dotty/tools/dotc/printing/Highlighting.scala77
-rw-r--r--compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala500
-rw-r--r--compiler/src/dotty/tools/dotc/printing/Printer.scala105
-rw-r--r--compiler/src/dotty/tools/dotc/printing/Printers.scala14
-rw-r--r--compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala652
-rw-r--r--compiler/src/dotty/tools/dotc/printing/Showable.scala34
-rw-r--r--compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala304
-rw-r--r--compiler/src/dotty/tools/dotc/printing/Texts.scala168
-rw-r--r--compiler/src/dotty/tools/dotc/printing/package.scala17
-rw-r--r--compiler/src/dotty/tools/dotc/repl/AbstractFileClassLoader.scala31
-rw-r--r--compiler/src/dotty/tools/dotc/repl/AmmoniteReader.scala82
-rw-r--r--compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala966
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ConsoleWriter.scala21
-rw-r--r--compiler/src/dotty/tools/dotc/repl/InteractiveReader.scala20
-rw-r--r--compiler/src/dotty/tools/dotc/repl/Interpreter.scala45
-rw-r--r--compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala210
-rw-r--r--compiler/src/dotty/tools/dotc/repl/Main.scala28
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ManifestInfo.scala20
-rw-r--r--compiler/src/dotty/tools/dotc/repl/NewLinePrintWriter.scala11
-rw-r--r--compiler/src/dotty/tools/dotc/repl/REPL.scala100
-rw-r--r--compiler/src/dotty/tools/dotc/repl/SimpleReader.scala24
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/Ansi.scala256
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/Filter.scala61
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala80
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/LICENSE25
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/Protocol.scala30
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala81
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/Terminal.scala320
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/Utils.scala169
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala163
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala170
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala334
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala165
-rw-r--r--compiler/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala157
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala63
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala21
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala145
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/Reporter.scala296
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala46
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala20
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala32
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/diagnostic/Message.scala133
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala74
-rw-r--r--compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala902
-rw-r--r--compiler/src/dotty/tools/dotc/rewrite/Rewrites.scala92
-rw-r--r--compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala518
-rw-r--r--compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala268
-rw-r--r--compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala156
-rw-r--r--compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala61
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala59
-rw-r--r--compiler/src/dotty/tools/dotc/transform/AugmentScala2Traits.scala101
-rw-r--r--compiler/src/dotty/tools/dotc/transform/CapturedVars.scala149
-rw-r--r--compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala95
-rw-r--r--compiler/src/dotty/tools/dotc/transform/CheckStatic.scala96
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ClassOf.scala30
-rw-r--r--compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala116
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Constructors.scala261
-rw-r--r--compiler/src/dotty/tools/dotc/transform/CrossCastAnd.scala30
-rw-r--r--compiler/src/dotty/tools/dotc/transform/CtxLazy.scala23
-rw-r--r--compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled98
-rw-r--r--compiler/src/dotty/tools/dotc/transform/DropInlined.scala15
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ElimByName.scala129
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala84
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala135
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala40
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Erasure.scala664
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala111
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala86
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala362
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala47
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala243
-rw-r--r--compiler/src/dotty/tools/dotc/transform/FirstTransform.scala193
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Flatten.scala47
-rw-r--r--compiler/src/dotty/tools/dotc/transform/FullParameterization.scala263
-rw-r--r--compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala83
-rw-r--r--compiler/src/dotty/tools/dotc/transform/GetClass.scala34
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Getters.scala76
-rw-r--r--compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala131
-rw-r--r--compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala168
-rw-r--r--compiler/src/dotty/tools/dotc/transform/LambdaLift.scala548
-rw-r--r--compiler/src/dotty/tools/dotc/transform/LazyVals.scala418
-rw-r--r--compiler/src/dotty/tools/dotc/transform/LiftTry.scala66
-rw-r--r--compiler/src/dotty/tools/dotc/transform/LinkScala2ImplClasses.scala62
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled95
-rw-r--r--compiler/src/dotty/tools/dotc/transform/MacroTransform.scala70
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Memoize.scala129
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Mixin.scala257
-rw-r--r--compiler/src/dotty/tools/dotc/transform/MixinOps.scala68
-rw-r--r--compiler/src/dotty/tools/dotc/transform/MoveStatics.scala77
-rw-r--r--compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala92
-rw-r--r--compiler/src/dotty/tools/dotc/transform/NormalizeFlags.scala25
-rw-r--r--compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala140
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala94
-rw-r--r--compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala1989
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Pickler.scala108
-rw-r--r--compiler/src/dotty/tools/dotc/transform/PostTyper.scala286
-rw-r--r--compiler/src/dotty/tools/dotc/transform/PrivateToStatic.scala.disabled94
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala115
-rw-r--r--compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala67
-rw-r--r--compiler/src/dotty/tools/dotc/transform/SelectStatic.scala56
-rw-r--r--compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala48
-rw-r--r--compiler/src/dotty/tools/dotc/transform/Splitter.scala121
-rw-r--r--compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala424
-rw-r--r--compiler/src/dotty/tools/dotc/transform/SymUtils.scala117
-rw-r--r--compiler/src/dotty/tools/dotc/transform/SyntheticMethods.scala198
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TailRec.scala384
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TreeChecker.scala452
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala48
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TreeGen.scala26
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TreeTransform.scala1221
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala99
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala124
-rw-r--r--compiler/src/dotty/tools/dotc/transform/TypeUtils.scala34
-rw-r--r--compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala41
-rw-r--r--compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala104
-rw-r--r--compiler/src/dotty/tools/dotc/transform/ValueClasses.scala56
-rw-r--r--compiler/src/dotty/tools/dotc/transform/patmat/Space.scala615
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Applications.scala1351
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Checking.scala557
-rw-r--r--compiler/src/dotty/tools/dotc/typer/ConstFold.scala182
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Docstrings.scala56
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Dynamic.scala104
-rw-r--r--compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala153
-rw-r--r--compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala191
-rw-r--r--compiler/src/dotty/tools/dotc/typer/FrontEnd.scala83
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Implicits.scala844
-rw-r--r--compiler/src/dotty/tools/dotc/typer/ImportInfo.scala117
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Inferencing.scala362
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Inliner.scala539
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Namer.scala1061
-rw-r--r--compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala488
-rw-r--r--compiler/src/dotty/tools/dotc/typer/ReTyper.scala108
-rw-r--r--compiler/src/dotty/tools/dotc/typer/RefChecks.scala1526
-rw-r--r--compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala524
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Typer.scala1952
-rw-r--r--compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala148
-rw-r--r--compiler/src/dotty/tools/dotc/typer/Variances.scala116
-rw-r--r--compiler/src/dotty/tools/dotc/util/Attachment.scala96
-rw-r--r--compiler/src/dotty/tools/dotc/util/Chars.scala96
-rw-r--r--compiler/src/dotty/tools/dotc/util/CommentParsing.scala239
-rw-r--r--compiler/src/dotty/tools/dotc/util/DiffUtil.scala174
-rw-r--r--compiler/src/dotty/tools/dotc/util/DotClass.scala12
-rw-r--r--compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala33
-rw-r--r--compiler/src/dotty/tools/dotc/util/HashSet.scala146
-rw-r--r--compiler/src/dotty/tools/dotc/util/LRUCache.scala100
-rw-r--r--compiler/src/dotty/tools/dotc/util/NameTransformer.scala163
-rw-r--r--compiler/src/dotty/tools/dotc/util/Positions.scala173
-rw-r--r--compiler/src/dotty/tools/dotc/util/Property.scala10
-rw-r--r--compiler/src/dotty/tools/dotc/util/Set.scala27
-rw-r--r--compiler/src/dotty/tools/dotc/util/ShowPickled.scala287
-rw-r--r--compiler/src/dotty/tools/dotc/util/SimpleMap.scala223
-rw-r--r--compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala28
-rw-r--r--compiler/src/dotty/tools/dotc/util/SourceFile.scala145
-rw-r--r--compiler/src/dotty/tools/dotc/util/SourcePosition.scala57
-rw-r--r--compiler/src/dotty/tools/dotc/util/Stats.scala78
-rw-r--r--compiler/src/dotty/tools/dotc/util/Util.scala32
-rw-r--r--compiler/src/dotty/tools/dotc/util/common.scala14
-rw-r--r--compiler/src/dotty/tools/dotc/util/kwords.sc18
-rw-r--r--compiler/src/dotty/tools/dotc/util/lrutest.sc40
-rw-r--r--compiler/src/dotty/tools/io/ClassPath.scala421
-rw-r--r--compiler/src/dotty/tools/io/DaemonThreadFactory.scala16
-rw-r--r--compiler/src/dotty/tools/io/Fileish.scala34
-rw-r--r--compiler/src/dotty/tools/io/Jar.scala172
-rw-r--r--compiler/src/dotty/tools/io/package.scala58
-rw-r--r--compiler/src/dotty/tools/package.scala24
272 files changed, 78495 insertions, 0 deletions
diff --git a/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala b/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
new file mode 100644
index 000000000..2ee1b6011
--- /dev/null
+++ b/compiler/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
@@ -0,0 +1,125 @@
+package dotty.tools.backend.jvm
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Types
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransform, MiniPhase, MiniPhaseTransform}
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc
+import dotty.tools.dotc.backend.jvm.DottyPrimitives
+import dotty.tools.dotc.core.Flags.FlagSet
+import dotty.tools.dotc.transform.Erasure
+import dotty.tools.dotc.transform.SymUtils._
+import java.io.{File => JFile}
+
+import scala.collection.generic.Clearable
+import scala.collection.mutable
+import scala.reflect.ClassTag
+import scala.reflect.internal.util.WeakHashSet
+import scala.reflect.io.{Directory, PlainDirectory, AbstractFile}
+import scala.tools.asm.{ClassVisitor, FieldVisitor, MethodVisitor}
+import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface}
+import dotty.tools.dotc.core._
+import Periods._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Denotations._
+import Phases._
+import java.lang.AssertionError
+import dotty.tools.dotc.util.Positions.Position
+import Decorators._
+import tpd._
+import StdNames.nme
+
+/**
+ * Mini-phase that identifies runnable JVM entry points (methods accepted by
+ * `CollectEntryPoints.isJavaEntryPoint`) and registers them with the GenBCode
+ * backend phase.
+ */
class CollectEntryPoints extends MiniPhaseTransform {
  def phaseName: String = "Collect entry points"

  /** Inspect each method definition; when its symbol qualifies as a Java-style
   *  entry point, register it with the GenBCode backend phase. The tree itself
   *  is always returned unchanged.
   */
  override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
    val sym = tree.symbol
    if ((sym ne NoSymbol) && CollectEntryPoints.isJavaEntryPoint(sym))
      ctx.genBCodePhase.asInstanceOf[GenBCode].registerEntryPoint(sym)
    tree
  }
}
+
object CollectEntryPoints {

  /** Does `sym` denote a method with the exact JVM entry-point signature,
   *  i.e. a method named `main` of type `(Array[String])Unit` (up to dealiasing)?
   */
  def isJavaMainMethod(sym: Symbol)(implicit ctx: Context) = {
    (sym.name == nme.main) && (sym.info match {
      case r@MethodType(_, List(defn.ArrayOf(t))) =>
        (t.widenDealias =:= defn.StringType) && (
          r.resultType.widenDealias =:= defn.UnitType)
      case _ => false
    })
  }

  /** Is `sym` a module whose companion should receive a static `main` forwarder?
   *
   *  Returns true only for a module with a correctly-typed `main` method for
   *  which a forwarder can actually be generated; for main-*looking* methods
   *  that do not qualify (generic, wrong signature, companion conflicts, trait
   *  companion) a warning is issued explaining why the program is not runnable.
   */
  def isJavaEntryPoint(sym: Symbol)(implicit ctx: Context): Boolean = {
    import Types.MethodType
    // The given class has a main method.
    def hasJavaMainMethod(sym: Symbol): Boolean =
      (toDenot(sym).info member nme.main).alternatives exists(x => isJavaMainMethod(x.symbol))

    // Warn and answer false. `pos` lets callers point at the offending method
    // rather than the module symbol itself.
    def fail(msg: String, pos: Position = sym.pos) = {
      ctx.warning( sym.name +
        s" has a main method with parameter type Array[String], but ${toDenot(sym).fullName} will not be a runnable program.\n  Reason: $msg",
        // Fixed: previously reported at sym.pos, ignoring the `pos` parameter,
        // so warnings for individual methods pointed at the wrong position.
        sourcePos(pos)
        // TODO: make this next claim true, if possible
        //   by generating valid main methods as static in module classes
        //   not sure what the jvm allows here
        // + "  You can still run the program by calling it as " + javaName(sym) + " instead."
      )
      false
    }
    def failNoForwarder(msg: String) = {
      fail(s"$msg, which means no static forwarder can be generated.\n")
    }
    // All candidate `main` overloads on the module; only modules are eligible.
    val possibles = if (sym.flags is Flags.Module) (toDenot(sym).info nonPrivateMember nme.main).alternatives else Nil
    // Does any candidate at least take a single Array parameter?
    val hasApproximate = possibles exists { m =>
      m.info match {
        case MethodType(_, p :: Nil) =>
          p.typeSymbol == defn.ArrayClass
        case _ => false
      }
    }
    // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
    hasApproximate && {
      // Before erasure so we can identify generic mains.
      {
        // implicit val c = ctx.withPhase(ctx.erasurePhase)

        val companion = sym.asClass.moduleClass

        if (hasJavaMainMethod(companion))
          failNoForwarder("companion contains its own main method")
        else if (toDenot(companion).info.member(nme.main) != NoDenotation)
          // this is only because forwarders aren't smart enough yet
          failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
        else if (companion.flags is Flags.Trait)
          failNoForwarder("companion is a trait")
        // Now either succeed, or issue some additional warnings for things which look like
        // attempts to be java main methods.
        else (possibles exists(x=> isJavaMainMethod(x.symbol))) || {
          possibles exists { m =>
            toDenot(m.symbol).info match {
              case t: PolyType =>
                fail("main methods cannot be generic.")
              case t@MethodType(paramNames, paramTypes) =>
                if (t.resultType :: paramTypes exists (_.typeSymbol.isAbstractType))
                  fail("main methods cannot refer to type parameters or abstract types.", m.symbol.pos)
                else
                  isJavaMainMethod(m.symbol) || fail("main method must have exact signature (Array[String])Unit", m.symbol.pos)
              case tp =>
                fail(s"don't know what this is: $tp", m.symbol.pos)
            }
          }
        }
      }
    }
  }
}
diff --git a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
new file mode 100644
index 000000000..8285bfe4b
--- /dev/null
+++ b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
@@ -0,0 +1,42 @@
+package dotty.tools.backend.jvm
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Flags.Trait
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Collect all super calls to trait members.
+ *
+ * For each super reference to trait member, register a call from the current class to the
+ * owner of the referenced member.
+ *
+ * This information is used to know if it is safe to remove a redundant mixin class.
+ * A redundant mixin class is one that is implemented by another mixin class. As the
+ * methods in a redundant mixin class could be implemented with a default abstract method,
+ * the redundant mixin class could be required as a parent by the JVM.
+ */
class CollectSuperCalls extends MiniPhaseTransform {
  import tpd._

  def phaseName: String = "collectSuperCalls"

  /** On a selection through `super`, when the selected member is owned by a
   *  trait, record a call from the current enclosing class to that trait.
   *  The tree is always returned unchanged.
   */
  override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree = {
    tree.qualifier match {
      case _: Super if tree.symbol.owner.is(Trait) =>
        registerSuperCall(ctx.owner.enclosingClass.asClass, tree.symbol.owner.asClass)
      case _ =>
    }
    tree
  }

  /** Forward the (calling class, called trait) pair to the backend,
   *  provided the configured genBCode phase really is GenBCode.
   */
  private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(implicit ctx: Context) = {
    ctx.genBCodePhase match {
      case backend: GenBCode => backend.registerSuperCall(sym, calls)
      case _ =>
    }
  }
}
diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
new file mode 100644
index 000000000..a7c449947
--- /dev/null
+++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
@@ -0,0 +1,1102 @@
+package dotty.tools.backend.jvm
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc
+import dotty.tools.dotc.backend.jvm.DottyPrimitives
+import dotty.tools.dotc.core.Flags.FlagSet
+import dotty.tools.dotc.transform.Erasure
+import dotty.tools.dotc.transform.SymUtils._
+import java.io.{File => JFile}
+
+import scala.collection.generic.Clearable
+import scala.collection.mutable
+import scala.reflect.ClassTag
+import scala.reflect.internal.util.WeakHashSet
+import scala.reflect.io.{AbstractFile, Directory, PlainDirectory}
+import scala.tools.asm.{AnnotationVisitor, ClassVisitor, FieldVisitor, MethodVisitor}
+import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface}
+import dotty.tools.dotc.core._
+import Periods._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Denotations._
+import Phases._
+import java.lang.AssertionError
+
+import dotty.tools.dotc.util.{DotClass, Positions}
+import Decorators._
+import tpd._
+
+import scala.tools.asm
+import NameOps._
+import StdNames.nme
+import NameOps._
+import dotty.tools.dotc.core
+import dotty.tools.dotc.core.Names.TypeName
+
+import scala.annotation.tailrec
+
+class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Map[Symbol, Set[ClassSymbol]])(implicit ctx: Context) extends BackendInterface{
+ type Symbol = Symbols.Symbol
+ type Type = Types.Type
+ type Tree = tpd.Tree
+ type CompilationUnit = dotc.CompilationUnit
+ type Constant = Constants.Constant
+ type Literal = tpd.Literal
+ type Position = Positions.Position
+ type Name = Names.Name
+ type ClassDef = tpd.TypeDef
+ type TypeDef = tpd.TypeDef
+ type Apply = tpd.Apply
+ type TypeApply = tpd.TypeApply
+ type Try = tpd.Try
+ type Assign = tpd.Assign
+ type Ident = tpd.Ident
+ type If = tpd.If
+ type ValDef = tpd.ValDef
+ type Throw = tpd.Apply
+ type Return = tpd.Return
+ type Block = tpd.Block
+ type Typed = tpd.Typed
+ type Match = tpd.Match
+ type This = tpd.This
+ type CaseDef = tpd.CaseDef
+ type Alternative = tpd.Alternative
+ type DefDef = tpd.DefDef
+ type Template = tpd.Template
+ type Select = tpd.Tree // Actually tpd.Select || tpd.Ident
+ type Bind = tpd.Bind
+ type New = tpd.New
+ type Super = tpd.Super
+ type Modifiers = Null
+ type Annotation = Annotations.Annotation
+ type ArrayValue = tpd.JavaSeqLiteral
+ type ApplyDynamic = Null
+ type ModuleDef = Null
+ type LabelDef = tpd.DefDef
+ type Closure = tpd.Closure
+
+ val NoSymbol = Symbols.NoSymbol
+ val NoPosition: Position = Positions.NoPosition
+ val EmptyTree: Tree = tpd.EmptyTree
+
+
+ val UnitTag: ConstantTag = Constants.UnitTag
+ val IntTag: ConstantTag = Constants.IntTag
+ val FloatTag: ConstantTag = Constants.FloatTag
+ val NullTag: ConstantTag = Constants.NullTag
+ val BooleanTag: ConstantTag = Constants.BooleanTag
+ val ByteTag: ConstantTag = Constants.ByteTag
+ val ShortTag: ConstantTag = Constants.ShortTag
+ val CharTag: ConstantTag = Constants.CharTag
+ val DoubleTag: ConstantTag = Constants.DoubleTag
+ val LongTag: ConstantTag = Constants.LongTag
+ val StringTag: ConstantTag = Constants.StringTag
+ val ClazzTag: ConstantTag = Constants.ClazzTag
+ val EnumTag: ConstantTag = Constants.EnumTag
+
+ val nme_This: Name = StdNames.nme.This
+ val nme_EMPTY_PACKAGE_NAME: Name = StdNames.nme.EMPTY_PACKAGE
+ val nme_CONSTRUCTOR: Name = StdNames.nme.CONSTRUCTOR
+ val nme_WILDCARD: Name = StdNames.nme.WILDCARD
+ val nme_THIS: Name = StdNames.nme.THIS
+ val nme_PACKAGE: Name = StdNames.nme.PACKAGE
+ val nme_EQEQ_LOCAL_VAR: Name = StdNames.nme.EQEQ_LOCAL_VAR
+
+ // require LambdaMetafactory: scalac uses getClassIfDefined, but we need those always.
+ override lazy val LambdaMetaFactory = ctx.requiredClass("java.lang.invoke.LambdaMetafactory")
+ override lazy val MethodHandle = ctx.requiredClass("java.lang.invoke.MethodHandle")
+
+ val nme_valueOf: Name = StdNames.nme.valueOf
+ val nme_apply = StdNames.nme.apply
+ val NothingClass: Symbol = defn.NothingClass
+ val NullClass: Symbol = defn.NullClass
+ val ObjectClass: Symbol = defn.ObjectClass
+ val Object_Type: Type = defn.ObjectType
+ val Throwable_Type: Type = defn.ThrowableType
+ val Object_isInstanceOf: Symbol = defn.Any_isInstanceOf
+ val Object_asInstanceOf: Symbol = defn.Any_asInstanceOf
+ val Object_equals: Symbol = defn.Any_equals
+ val ArrayClass: Symbol = defn.ArrayClass
+ val UnitClass: Symbol = defn.UnitClass
+ val BooleanClass: Symbol = defn.BooleanClass
+ val CharClass: Symbol = defn.CharClass
+ val ShortClass: Symbol = defn.ShortClass
+ val ClassClass: Symbol = defn.ClassClass
+ val ByteClass: Symbol = defn.ByteClass
+ val IntClass: Symbol = defn.IntClass
+ val LongClass: Symbol = defn.LongClass
+ val FloatClass: Symbol = defn.FloatClass
+ val DoubleClass: Symbol = defn.DoubleClass
+ def isArrayClone(tree: Tree) = tree match {
+ case Select(qual, StdNames.nme.clone_) if qual.tpe.widen.isInstanceOf[JavaArrayType] => true
+ case _ => false
+ }
+
+ val hashMethodSym: Symbol = NoSymbol // used to dispatch ## on primitives to ScalaRuntime.hash. Should be implemented by a miniphase
+ val externalEqualsNumNum: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum)
+ val externalEqualsNumChar: Symbol = NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private
+ val externalEqualsNumObject: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject)
+ val externalEquals: Symbol = defn.BoxesRunTimeClass.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol
+ val MaxFunctionArity: Int = Definitions.MaxFunctionArity
+ val FunctionClass: Array[Symbol] = defn.FunctionClassPerRun()
+ val AbstractFunctionClass: Array[Symbol] = defn.AbstractFunctionClassPerRun()
+ val PartialFunctionClass: Symbol = defn.PartialFunctionClass
+ val AbstractPartialFunctionClass: Symbol = defn.AbstractPartialFunctionClass
+ val String_valueOf: Symbol = defn.String_valueOf_Object
+ lazy val Predef_classOf: Symbol = defn.ScalaPredefModule.requiredMethod(nme.classOf)
+
+ lazy val AnnotationRetentionAttr = ctx.requiredClass("java.lang.annotation.Retention")
+ lazy val AnnotationRetentionSourceAttr = ctx.requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE")
+ lazy val AnnotationRetentionClassAttr = ctx.requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS")
+ lazy val AnnotationRetentionRuntimeAttr = ctx.requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME")
+ lazy val JavaAnnotationClass = ctx.requiredClass("java.lang.annotation.Annotation")
+
+ def boxMethods: Map[Symbol, Symbol] = defn.ScalaValueClasses().map{x => // @darkdimius Are you sure this should be a def?
+ (x, Erasure.Boxing.boxMethod(x.asClass))
+ }.toMap
+ def unboxMethods: Map[Symbol, Symbol] = defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap
+
+ override def isSyntheticArrayConstructor(s: Symbol) = {
+ s eq defn.newArrayMethod
+ }
+
+ def isBox(sym: Symbol): Boolean = Erasure.Boxing.isBox(sym)
+ def isUnbox(sym: Symbol): Boolean = Erasure.Boxing.isUnbox(sym)
+
+ val primitives: Primitives = new Primitives {
+ val primitives = new DottyPrimitives(ctx)
+ def getPrimitive(app: Apply, reciever: Type): Int = primitives.getPrimitive(app, reciever)
+
+ def getPrimitive(sym: Symbol): Int = primitives.getPrimitive(sym)
+
+ def isPrimitive(fun: Tree): Boolean = primitives.isPrimitive(fun)
+ }
+ implicit val TypeDefTag: ClassTag[TypeDef] = ClassTag[TypeDef](classOf[TypeDef])
+ implicit val ApplyTag: ClassTag[Apply] = ClassTag[Apply](classOf[Apply])
+ implicit val SelectTag: ClassTag[Select] = ClassTag[Select](classOf[Select])
+ implicit val TypeApplyTag: ClassTag[TypeApply] = ClassTag[TypeApply](classOf[TypeApply])
+ implicit val ClassDefTag: ClassTag[ClassDef] = ClassTag[TypeDef](classOf[TypeDef])
+ implicit val TryTag: ClassTag[Try] = ClassTag[Try](classOf[Try])
+ implicit val AssignTag: ClassTag[Assign] = ClassTag[Assign](classOf[Assign])
+ implicit val IdentTag: ClassTag[Ident] = ClassTag[Ident](classOf[Ident])
+ implicit val IfTag: ClassTag[If] = ClassTag[If](classOf[If])
+ implicit val LabelDefTag: ClassTag[LabelDef] = ClassTag[LabelDef](classOf[LabelDef])
+ implicit val ValDefTag: ClassTag[ValDef] = ClassTag[ValDef](classOf[ValDef])
+ implicit val ThrowTag: ClassTag[Throw] = ClassTag[Throw](classOf[Throw])
+ implicit val ReturnTag: ClassTag[Return] = ClassTag[Return](classOf[Return])
+ implicit val LiteralTag: ClassTag[Literal] = ClassTag[Literal](classOf[Literal])
+ implicit val BlockTag: ClassTag[Block] = ClassTag[Block](classOf[Block])
+ implicit val TypedTag: ClassTag[Typed] = ClassTag[Typed](classOf[Typed])
+ implicit val ArrayValueTag: ClassTag[ArrayValue] = ClassTag[ArrayValue](classOf[ArrayValue])
+ implicit val MatchTag: ClassTag[Match] = ClassTag[Match](classOf[Match])
+ implicit val CaseDefTag: ClassTag[CaseDef] = ClassTag[CaseDef](classOf[CaseDef])
+ implicit val ThisTag: ClassTag[This] = ClassTag[This](classOf[This])
+ implicit val AlternativeTag: ClassTag[Alternative] = ClassTag[Alternative](classOf[Alternative])
+ implicit val DefDefTag: ClassTag[DefDef] = ClassTag[DefDef](classOf[DefDef])
+ implicit val ModuleDefTag: ClassTag[ModuleDef] = ClassTag[ModuleDef](classOf[ModuleDef])
+ implicit val NameTag: ClassTag[Name] = ClassTag[Name](classOf[Name])
+ implicit val TemplateTag: ClassTag[Template] = ClassTag[Template](classOf[Template])
+ implicit val BindTag: ClassTag[Bind] = ClassTag[Bind](classOf[Bind])
+ implicit val NewTag: ClassTag[New] = ClassTag[New](classOf[New])
+ implicit val ApplyDynamicTag: ClassTag[ApplyDynamic] = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
+ implicit val SuperTag: ClassTag[Super] = ClassTag[Super](classOf[Super])
+ implicit val ConstantClassTag: ClassTag[Constant] = ClassTag[Constant](classOf[Constant])
+ implicit val ClosureTag: ClassTag[Closure] = ClassTag[Closure](classOf[Closure])
+
+  /* don't emit any annotations for now */
+ def isRuntimeVisible(annot: Annotation): Boolean = {
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match {
+ case Some(retentionAnnot) =>
+ retentionAnnot.tree.find(_.symbol == AnnotationRetentionRuntimeAttr).isDefined
+ case _ =>
+ // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
+ // annotation is emitted with visibility `RUNTIME`
+ // dotty bug: #389
+ true
+ }
+ }
+
+ def shouldEmitAnnotation(annot: Annotation): Boolean = {
+ annot.symbol.isJavaDefined &&
+ retentionPolicyOf(annot) != AnnotationRetentionSourceAttr &&
+ annot.args.isEmpty
+ }
+
+ private def retentionPolicyOf(annot: Annotation): Symbol =
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).
+ flatMap(_.argument(0).map(_.symbol)).getOrElse(AnnotationRetentionClassAttr)
+
+ private def emitArgument(av: AnnotationVisitor,
+ name: String,
+ arg: Tree, bcodeStore: BCodeHelpers)(innerClasesStore: bcodeStore.BCInnerClassGen): Unit = {
+ (arg: @unchecked) match {
+
+ case Literal(const @ Constant(_)) =>
+ const.tag match {
+ case BooleanTag | ByteTag | ShortTag | CharTag | IntTag | LongTag | FloatTag | DoubleTag => av.visit(name, const.value)
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
+ case ClazzTag => av.visit(name, const.typeValue.toTypeKind(bcodeStore)(innerClasesStore).toASMType)
+ case EnumTag =>
+ val edesc = innerClasesStore.typeDescriptor(const.tpe.asInstanceOf[bcodeStore.int.Type]) // the class descriptor of the enumeration class.
+            val evalue = const.symbolValue.name.toString // the actual enumeration value.
+ av.visitEnum(name, edesc, evalue)
+ }
+ case t: TypeApply if (t.fun.symbol == Predef_classOf) =>
+ av.visit(name, t.args.head.tpe.classSymbol.denot.info.toTypeKind(bcodeStore)(innerClasesStore).toASMType)
+ case t: tpd.Select =>
+ if (t.symbol.denot.is(Flags.Enum)) {
+ val edesc = innerClasesStore.typeDescriptor(t.tpe.asInstanceOf[bcodeStore.int.Type]) // the class descriptor of the enumeration class.
+          val evalue = t.symbol.name.toString // the actual enumeration value.
+ av.visitEnum(name, edesc, evalue)
+ } else {
+          assert(toDenot(t.symbol).name.toTermName.defaultGetterIndex >= 0) // this should be a default getter. do not emit.
+ }
+ case t: SeqLiteral =>
+ val arrAnnotV: AnnotationVisitor = av.visitArray(name)
+ for(arg <- t.elems) { emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) }
+ arrAnnotV.visitEnd()
+
+ case Apply(fun, args) if (fun.symbol == defn.ArrayClass.primaryConstructor ||
+ (toDenot(fun.symbol).owner == defn.ArrayClass.linkedClass && fun.symbol.name == nme_apply)) =>
+ val arrAnnotV: AnnotationVisitor = av.visitArray(name)
+
+ var actualArgs = if (fun.tpe.isInstanceOf[ImplicitMethodType]) {
+ // generic array method, need to get implicit argument out of the way
+ fun.asInstanceOf[Apply].args
+ } else args
+
+ val flatArgs = actualArgs.flatMap {
+ case t: tpd.SeqLiteral => t.elems
+ case e => List(e)
+ }
+ for(arg <- flatArgs) { emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) }
+ arrAnnotV.visitEnd()
+/*
+ case sb @ ScalaSigBytes(bytes) =>
+ // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
+ // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
+ if (sb.fitsInOneString) {
+ av.visit(name, BCodeAsmCommon.strEncode(sb))
+ } else {
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- BCodeAsmCommon.arrEncode(sb)) { arrAnnotV.visit(name, arg) }
+ arrAnnotV.visitEnd()
+ } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
+*/
+ case t @ Apply(constr, args) if t.tpe.derivesFrom(JavaAnnotationClass) =>
+ val typ = t.tpe.classSymbol.denot.info
+ val assocs = assocsFromApply(t)
+ val desc = innerClasesStore.typeDescriptor(typ.asInstanceOf[bcodeStore.int.Type]) // the class descriptor of the nested annotation class
+ val nestedVisitor = av.visitAnnotation(name, desc)
+ emitAssocs(nestedVisitor, assocs, bcodeStore)(innerClasesStore)
+ }
+ }
+
+ override def emitAnnotations(cw: asm.ClassVisitor, annotations: List[Annotation], bcodeStore: BCodeHelpers)
+ (innerClasesStore: bcodeStore.BCInnerClassGen) = {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val typ = annot.atp
+ val assocs = annot.assocs
+ val av = cw.visitAnnotation(innerClasesStore.typeDescriptor(typ.asInstanceOf[bcodeStore.int.Type]), isRuntimeVisible(annot))
+ emitAssocs(av, assocs, bcodeStore)(innerClasesStore)
+ }
+ }
+
+ private def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, Object)], bcodeStore: BCodeHelpers)
+ (innerClasesStore: bcodeStore.BCInnerClassGen) = {
+ for ((name, value) <- assocs)
+ emitArgument(av, name.toString, value.asInstanceOf[Tree], bcodeStore)(innerClasesStore)
+ av.visitEnd()
+ }
+
+ override def emitAnnotations(mw: asm.MethodVisitor, annotations: List[Annotation], bcodeStore: BCodeHelpers)
+ (innerClasesStore: bcodeStore.BCInnerClassGen) = {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val typ = annot.atp
+ val assocs = annot.assocs
+ val av = mw.visitAnnotation(innerClasesStore.typeDescriptor(typ.asInstanceOf[bcodeStore.int.Type]), isRuntimeVisible(annot))
+ emitAssocs(av, assocs, bcodeStore)(innerClasesStore)
+ }
+ }
+
+ override def emitAnnotations(fw: asm.FieldVisitor, annotations: List[Annotation], bcodeStore: BCodeHelpers)
+ (innerClasesStore: bcodeStore.BCInnerClassGen) = {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val typ = annot.atp
+ val assocs = annot.assocs
+ val av = fw.visitAnnotation(innerClasesStore.typeDescriptor(typ.asInstanceOf[bcodeStore.int.Type]), isRuntimeVisible(annot))
+ emitAssocs(av, assocs, bcodeStore)(innerClasesStore)
+ }
+ }
+
+ override def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[Annotation]], bcodeStore: BCodeHelpers)
+ (innerClasesStore: bcodeStore.BCInnerClassGen): Unit = {
+ val annotationss = pannotss map (_ filter shouldEmitAnnotation)
+ if (annotationss forall (_.isEmpty)) return
+ for ((annots, idx) <- annotationss.zipWithIndex;
+ annot <- annots) {
+ val typ = annot.atp
+ val assocs = annot.assocs
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, innerClasesStore.typeDescriptor(typ.asInstanceOf[bcodeStore.int.Type]), isRuntimeVisible(annot))
+ emitAssocs(pannVisitor, assocs, bcodeStore)(innerClasesStore)
+ }
+ }
+
+ def getAnnotPickle(jclassName: String, sym: Symbol): Option[Annotation] = None
+
+
+ def getRequiredClass(fullname: String): Symbol = ctx.requiredClass(fullname.toTermName)
+
+ def getClassIfDefined(fullname: String): Symbol = NoSymbol // used only for android. todo: implement
+
+ private def erasureString(clazz: Class[_]): String = {
+ if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]"
+ else clazz.getName
+ }
+
+ def requiredClass[T](implicit evidence: ClassTag[T]): Symbol = {
+ ctx.requiredClass(erasureString(evidence.runtimeClass).toTermName)
+ }
+
+ def requiredModule[T](implicit evidence: ClassTag[T]): Symbol = {
+ val moduleName = erasureString(evidence.runtimeClass)
+ val className = if (moduleName.endsWith("$")) moduleName.dropRight(1) else moduleName
+ ctx.requiredModule(className.toTermName)
+ }
+
+
+ def debuglog(msg: => String): Unit = ctx.debuglog(msg)
+ def informProgress(msg: String): Unit = ctx.informProgress(msg)
+ def log(msg: => String): Unit = ctx.log(msg)
+ def error(pos: Position, msg: String): Unit = ctx.error(msg, pos)
+ def warning(pos: Position, msg: String): Unit = ctx.warning(msg, pos)
+ def abort(msg: String): Nothing = {
+ ctx.error(msg)
+ throw new RuntimeException(msg)
+ }
+
+ def emitAsmp: Option[String] = None
+
+ def shouldEmitJumpAfterLabels = true
+
+ def dumpClasses: Option[String] =
+ if (ctx.settings.Ydumpclasses.isDefault) None
+ else Some(ctx.settings.Ydumpclasses.value)
+
+ def mainClass: Option[String] =
+ if (ctx.settings.mainClass.isDefault) None
+ else Some(ctx.settings.mainClass.value)
+ def setMainClass(name: String): Unit = ctx.settings.mainClass.update(name)
+
+
+ def noForwarders: Boolean = ctx.settings.noForwarders.value
+ def debuglevel: Int = 3 // 0 -> no debug info; 1-> filename; 2-> lines; 3-> varnames
+ def settings_debug: Boolean = ctx.settings.debug.value
+ def targetPlatform: String = ctx.settings.target.value
+
+ val perRunCaches: Caches = new Caches {
+ def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]()
+ def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]()
+ def recordCache[T <: Clearable](cache: T): T = cache
+ def newWeakSet[K <: AnyRef](): WeakHashSet[K] = new WeakHashSet[K]()
+ def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]()
+ def newSet[K](): mutable.Set[K] = new mutable.HashSet[K]
+ }
+
+
+
+ val MODULE_INSTANCE_FIELD: String = nme.MODULE_INSTANCE_FIELD.toString
+
+ def internalNameString(offset: Int, length: Int): String = new String(Names.chrs, offset, length)
+
+ def newTermName(prefix: String): Name = prefix.toTermName
+
+ val Flag_SYNTHETIC: Flags = Flags.Synthetic.bits
+ val Flag_METHOD: Flags = Flags.Method.bits
+ val ExcludedForwarderFlags: Flags = {
+ Flags.Specialized | Flags.Lifted | Flags.Protected | Flags.JavaStatic |
+ Flags.ExpandedName | Flags.Bridge | Flags.VBridge | Flags.Private | Flags.Macro
+ }.bits
+
+
+ def isQualifierSafeToElide(qual: Tree): Boolean = tpd.isIdempotentExpr(qual)
+ def desugarIdent(i: Ident): Option[tpd.Select] = {
+ i.tpe match {
+ case TermRef(prefix: TermRef, name) =>
+ Some(tpd.ref(prefix).select(i.symbol))
+ case TermRef(prefix: ThisType, name) =>
+ Some(tpd.This(prefix.cls).select(i.symbol))
+ case TermRef(NoPrefix, name) =>
+ if (i.symbol is Flags.Method) Some(This(i.symbol.topLevelClass).select(i.symbol)) // workaround #342 todo: remove after fixed
+ else None
+ case _ => None
+ }
+ }
+ def getLabelDefOwners(tree: Tree): Map[Tree, List[LabelDef]] = {
+ // for each rhs of a defdef returns LabelDefs inside this DefDef
+ val res = new collection.mutable.HashMap[Tree, List[LabelDef]]()
+
+ val t = new TreeTraverser {
+ var outerRhs: Tree = tree
+
+ def traverse(tree: tpd.Tree)(implicit ctx: Context): Unit = tree match {
+ case t: DefDef =>
+ if (t.symbol is Flags.Label)
+ res.put(outerRhs, t :: res.getOrElse(outerRhs, Nil))
+ else outerRhs = t
+ traverseChildren(t)
+ case _ => traverseChildren(tree)
+ }
+ }
+
+ t.traverse(tree)
+ res.toMap
+ }
+
+ // todo: remove
+ def isMaybeBoxed(sym: Symbol) = {
+ (sym == ObjectClass) ||
+ (sym == JavaSerializableClass) ||
+ (sym == defn.ComparableClass) ||
+ (sym derivesFrom BoxedNumberClass) ||
+ (sym derivesFrom BoxedCharacterClass) ||
+ (sym derivesFrom BoxedBooleanClass)
+ }
+
+ def getSingleOutput: Option[AbstractFile] = None // todo: implement
+
+
+ def getGenericSignature(sym: Symbol, owner: Symbol): String = null // todo: implement
+
+ def getStaticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = null // todo: implement
+
+
+ def sourceFileFor(cu: CompilationUnit): String = cu.source.file.name
+
+
+
+ implicit def positionHelper(a: Position): PositionHelper = new PositionHelper {
+ def isDefined: Boolean = a.exists
+ def line: Int = sourcePos(a).line + 1
+ def finalPosition: Position = a
+ }
+
+ implicit def constantHelper(a: Constant): ConstantHelper = new ConstantHelper {
+ def booleanValue: Boolean = a.booleanValue
+ def longValue: Long = a.longValue
+ def byteValue: Byte = a.byteValue
+ def stringValue: String = a.stringValue
+ def symbolValue: Symbol = a.symbolValue
+ def floatValue: Float = a.floatValue
+ def value: Any = a.value
+ def tag: ConstantTag = a.tag
+ def typeValue: Type = a.typeValue
+ def shortValue: Short = a.shortValue
+ def intValue: Int = a.intValue
+ def doubleValue: Double = a.doubleValue
+ def charValue: Char = a.charValue
+ }
+
+
+ implicit def treeHelper(a: Tree): TreeHelper = new TreeHelper {
+ def symbol: Symbol = a.symbol
+
+ def pos: Position = a.pos
+
+ def isEmpty: Boolean = a.isEmpty
+
+ def tpe: Type = a.tpe
+
+ def exists(pred: (Tree) => Boolean): Boolean = a.find(pred).isDefined
+ }
+
+
+ implicit def annotHelper(a: Annotation): AnnotationHelper = new AnnotationHelper {
+ def atp: Type = a.tree.tpe
+
+ def assocs: List[(Name, Tree)] = assocsFromApply(a.tree)
+
+ def symbol: Symbol = a.tree.symbol
+
+    def args: List[Tree] = List.empty // these are arguments to scala-defined annotations. they are never emitted
+ }
+
+ def assocsFromApply(tree: Tree) = {
+ tree match {
+ case Apply(fun, args) =>
+ fun.tpe.widen match {
+ case MethodType(names, _) =>
+ names zip args
+ }
+ }
+ }
+
+
+ implicit def nameHelper(n: Name): NameHelper = new NameHelper {
+ def toTypeName: Name = n.toTypeName
+ def isTypeName: Boolean = n.isTypeName
+ def toTermName: Name = n.toTermName
+ def dropModule: Name = n.stripModuleClassSuffix
+
+ def len: Int = n.length
+ def offset: Int = n.start
+ def isTermName: Boolean = n.isTermName
+ def startsWith(s: String): Boolean = n.startsWith(s)
+ }
+
+
+ implicit def symHelper(sym: Symbol): SymbolHelper = new SymbolHelper {
+ // names
+ def fullName(sep: Char): String = sym.showFullName
+ def fullName: String = sym.showFullName
+ def simpleName: Name = sym.name
+ def javaSimpleName: Name = toDenot(sym).name // addModuleSuffix(simpleName.dropLocal)
+ def javaBinaryName: Name = toDenot(sym).fullNameSeparated("/") // addModuleSuffix(fullNameInternal('/'))
+ def javaClassName: String = toDenot(sym).fullName.toString// addModuleSuffix(fullNameInternal('.')).toString
+ def name: Name = sym.name
+ def rawname: Name = sym.name // todo ????
+
+ // types
+ def info: Type = toDenot(sym).info
+    def tpe: Type = toDenot(sym).info // todo: what's the difference between tpe and info?
+ def thisType: Type = toDenot(sym).thisType
+
+ // tests
+ def isClass: Boolean = {
+ sym.isPackageObject || (sym.isClass)
+ }
+ def isType: Boolean = sym.isType
+ def isAnonymousClass: Boolean = toDenot(sym).isAnonymousClass
+ def isConstructor: Boolean = toDenot(sym).isConstructor
+ def isAnonymousFunction: Boolean = toDenot(sym).isAnonymousFunction
+ def isMethod: Boolean = sym is Flags.Method
+ def isPublic: Boolean = sym.flags.is(Flags.EmptyFlags, Flags.Private | Flags.Protected)
+ def isSynthetic: Boolean = sym is Flags.Synthetic
+ def isPackageClass: Boolean = sym is Flags.PackageClass
+ def isModuleClass: Boolean = sym is Flags.ModuleClass
+ def isModule: Boolean = sym is Flags.Module
+ def isStrictFP: Boolean = false // todo: implement
+ def isLabel: Boolean = sym is Flags.Label
+ def hasPackageFlag: Boolean = sym is Flags.Package
+ def isImplClass: Boolean = sym is Flags.ImplClass
+ def isInterface: Boolean = (sym is Flags.PureInterface) || (sym is Flags.Trait)
+    def hasGetter: Boolean = false // used only for generation of beaninfo todo: implement
+ def isGetter: Boolean = toDenot(sym).isGetter
+ def isSetter: Boolean = toDenot(sym).isSetter
+ def isGetClass: Boolean = sym eq defn.Any_getClass
+ def isJavaDefined: Boolean = sym is Flags.JavaDefined
+ def isJavaDefaultMethod: Boolean = !((sym is Flags.Deferred) || toDenot(sym).isClassConstructor)
+ def isDeferred: Boolean = sym is Flags.Deferred
+ def isPrivate: Boolean = sym is Flags.Private
+ def getsJavaFinalFlag: Boolean =
+ isFinal && !toDenot(sym).isClassConstructor && !(sym is Flags.Mutable) && !(sym.enclosingClass is Flags.Trait)
+
+ def getsJavaPrivateFlag: Boolean =
+ isPrivate //|| (sym.isPrimaryConstructor && sym.owner.isTopLevelModuleClass)
+
+ def isFinal: Boolean = sym is Flags.Final
+ def isStaticMember: Boolean = (sym ne NoSymbol) &&
+ ((sym is Flags.JavaStatic) || (owner is Flags.ImplClass) || toDenot(sym).hasAnnotation(ctx.definitions.ScalaStaticAnnot))
+      // guard against no symbol because this code is executed to select which call type (static/dynamic) to use to call array.clone
+
+ def isBottomClass: Boolean = (sym ne defn.NullClass) && (sym ne defn.NothingClass)
+ def isBridge: Boolean = sym is Flags.Bridge
+ def isArtifact: Boolean = sym is Flags.Artifact
+ def hasEnumFlag: Boolean = sym is Flags.Enum
+ def hasAccessBoundary: Boolean = sym.accessBoundary(defn.RootClass) ne defn.RootClass
+ def isVarargsMethod: Boolean = sym is Flags.JavaVarargs
+ def isDeprecated: Boolean = false
+ def isMutable: Boolean = sym is Flags.Mutable
+ def hasAbstractFlag: Boolean =
+ (sym is Flags.Abstract) || (sym is Flags.JavaInterface) || (sym is Flags.Trait)
+ def hasModuleFlag: Boolean = sym is Flags.Module
+ def isSynchronized: Boolean = sym is Flags.Synchronized
+ def isNonBottomSubClass(other: Symbol): Boolean = sym.derivesFrom(other)
+ def hasAnnotation(ann: Symbol): Boolean = toDenot(sym).hasAnnotation(ann)
+ def shouldEmitForwarders: Boolean =
+ (sym is Flags.Module) && !(sym is Flags.ImplClass) && sym.isStatic
+ def isJavaEntryPoint: Boolean = CollectEntryPoints.isJavaEntryPoint(sym)
+
+ def isClassConstructor: Boolean = toDenot(sym).isClassConstructor
+
+ /**
+ * True for module classes of modules that are top-level or owned only by objects. Module classes
+ * for such objects will get a MODULE$ flag and a corresponding static initializer.
+ */
+ def isStaticModuleClass: Boolean =
+ (sym is Flags.Module) && {
+ // scalac uses atPickling here
+ // this would not work if modules are created after pickling
+ // for example by specialization
+ val original = toDenot(sym).initial
+ val validity = original.validFor
+ val shiftedContext = ctx.withPhase(validity.phaseId)
+ toDenot(sym)(shiftedContext).isStatic(shiftedContext)
+ }
+
+ def isStaticConstructor: Boolean = (isStaticMember && isClassConstructor) || (sym.name eq core.Names.STATIC_CONSTRUCTOR)
+
+
+ // navigation
+ def owner: Symbol = toDenot(sym).owner
+ def rawowner: Symbol = {
+ originalOwner
+ }
+ def originalOwner: Symbol =
+ // used to populate the EnclosingMethod attribute.
+      // it is very tricky in the presence of classes (and anonymous classes) defined inside super calls.
+ if (sym.exists) {
+ val original = toDenot(sym).initial
+ val validity = original.validFor
+ val shiftedContext = ctx.withPhase(validity.phaseId)
+ val r = toDenot(sym)(shiftedContext).maybeOwner.lexicallyEnclosingClass(shiftedContext)
+ r
+ } else NoSymbol
+ def parentSymbols: List[Symbol] = toDenot(sym).info.parents.map(_.typeSymbol)
+ def superClass: Symbol = {
+ val t = toDenot(sym).asClass.superClass
+ if (t.exists) t
+ else if (sym is Flags.ModuleClass) {
+ // workaround #371
+
+ println(s"Warning: mocking up superclass for $sym")
+ ObjectClass
+ }
+ else t
+ }
+ def enclClass: Symbol = toDenot(sym).enclosingClass
+ def linkedClassOfClass: Symbol = linkedClass
+ def linkedClass: Symbol = {
+ toDenot(sym)(ctx).linkedClass(ctx)
+ } //exitingPickler(sym.linkedClassOfClass)
+ def companionClass: Symbol = toDenot(sym).companionClass
+ def companionModule: Symbol = toDenot(sym).companionModule
+ def companionSymbol: Symbol = if (sym is Flags.Module) companionClass else companionModule
+ def moduleClass: Symbol = toDenot(sym).moduleClass
+ def enclosingClassSym: Symbol = {
+ if (this.isClass) {
+ val ct = ctx.withPhase(ctx.flattenPhase.prev)
+ toDenot(sym)(ct).owner.enclosingClass(ct)
+ }
+ else sym.enclosingClass(ctx.withPhase(ctx.flattenPhase.prev))
+ } //todo is handled specially for JavaDefined symbols in scalac
+
+
+
+ // members
+ def primaryConstructor: Symbol = toDenot(sym).primaryConstructor
+
+ /** For currently compiled classes: All locally defined classes including local classes.
+ * The empty list for classes that are not currently compiled.
+ */
+ def nestedClasses: List[Symbol] = definedClasses(ctx.flattenPhase)
+
+ /** For currently compiled classes: All classes that are declared as members of this class
+ * (but not inherited ones). The empty list for classes that are not currently compiled.
+ */
+ def memberClasses: List[Symbol] = definedClasses(ctx.lambdaLiftPhase)
+
+ private def definedClasses(phase: Phase) =
+ if (sym.isDefinedInCurrentRun)
+ ctx.atPhase(phase) { implicit ctx =>
+ toDenot(sym).info.decls.filter(_.isClass).toList
+ }
+ else Nil
+
+ def annotations: List[Annotation] = Nil
+ def companionModuleMembers: List[Symbol] = {
+ // phase travel to exitingPickler: this makes sure that memberClassesOf only sees member classes,
+      // not local classes of the companion module (E in the example) that were lifted by lambdalift.
+ if (linkedClass.isTopLevelModuleClass) /*exitingPickler*/ linkedClass.memberClasses
+ else Nil
+ }
+ def fieldSymbols: List[Symbol] = {
+ toDenot(sym).info.decls.filter(p => p.isTerm && !p.is(Flags.Method)).toList
+ }
+ def methodSymbols: List[Symbol] =
+ for (f <- toDenot(sym).info.decls.toList if f.isMethod && f.isTerm && !f.isModule) yield f
+ def serialVUID: Option[Long] = None
+
+
+ def freshLocal(cunit: CompilationUnit, name: String, tpe: Type, pos: Position, flags: Flags): Symbol = {
+ ctx.newSymbol(sym, name.toTermName, FlagSet(flags), tpe, NoSymbol, pos)
+ }
+
+ def getter(clz: Symbol): Symbol = decorateSymbol(sym).getter
+ def setter(clz: Symbol): Symbol = decorateSymbol(sym).setter
+
+ def moduleSuffix: String = "" // todo: validate that names already have $ suffix
+ def outputDirectory: AbstractFile = DottyBackendInterface.this.outputDirectory
+ def pos: Position = sym.pos
+
+ def throwsAnnotations: List[Symbol] = Nil
+
+ /**
+ * All interfaces implemented by a class, except for those inherited through the superclass.
+ * Redundant interfaces are removed unless there is a super call to them.
+ */
+ def superInterfaces: List[Symbol] = {
+ val directlyInheritedTraits = decorateSymbol(sym).directlyInheritedTraits
+ val directlyInheritedTraitsSet = directlyInheritedTraits.toSet
+ // every transitive base class of the direct parent traits, minus the traits themselves
+ // (baseClasses.drop(1) skips the trait's own entry)
+ val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.symbol.asClass.baseClasses.drop(1)).toSet
+ // traits this class makes a super call to, as recorded via GenBCode.registerSuperCall
+ val superCalls = superCallsMap.getOrElse(sym, Set.empty)
+ // super-called traits that are not direct parents must still be listed as interfaces
+ val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Flags.Trait))
+// if (additional.nonEmpty)
+// println(s"$fullName: adding supertraits $additional")
+ // drop a direct parent trait only when it is redundant (inherited via another parent)
+ // AND never the target of a super call
+ directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional
+ }
+
+ /**
+ * True for module classes of package level objects. The backend will generate a mirror class for
+ * such objects.
+ */
+ def isTopLevelModuleClass: Boolean = sym.isModuleClass && sym.isStatic
+
+ /**
+ * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain.
+ *
+ * The problem is that we are interested in a source-level property. Various phases changed the
+ * symbol's properties in the meantime, mostly lambdalift modified (destructively) the owner.
+ * Therefore, `sym.isStatic` is not what we want. For example, in
+ * object T { def f { object U } }
+ * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here.
+ */
+ def isOriginallyStaticOwner: Boolean = sym.isStatic
+
+
+ def addRemoteRemoteExceptionAnnotation: Unit = ()
+
+ def samMethod(): Symbol =
+ toDenot(sym).info.abstractTermMembers.headOption.getOrElse(toDenot(sym).info.member(nme.apply)).symbol
+ }
+
+
+ implicit def typeHelper(tp: Type): TypeHelper = new TypeHelper {
+ def member(string: Name): Symbol = tp.member(string.toTermName).symbol
+
+ def isFinalType: Boolean = tp.typeSymbol is Flags.Final //in scalac checks for type parameters. Why? Aren't they gone by backend?
+
+ def underlying: Type = tp match {
+ case t: TypeProxy => t.underlying
+ case _ => tp
+ }
+
+ def paramTypes: List[Type] = tp.firstParamTypes
+
+ def <:<(other: Type): Boolean = tp <:< other
+
+ def memberInfo(s: Symbol): Type = tp.memberInfo(s)
+
+ def decls: List[Symbol] = tp.decls.map(_.symbol).toList
+
+ def members: List[Symbol] =
+ tp.memberDenots(takeAllFilter, (name, buf) => buf ++= tp.member(name).alternatives).map(_.symbol).toList
+
+ def typeSymbol: Symbol = tp.widenDealias.typeSymbol
+
+ def =:=(other: Type): Boolean = tp =:= other
+
+ def membersBasedOnFlags(excludedFlags: Flags, requiredFlags: Flags): List[Symbol] =
+ tp.membersBasedOnFlags(FlagSet(requiredFlags), FlagSet(excludedFlags)).map(_.symbol).toList
+
+ def resultType: Type = tp.resultType
+
+ /** Translate the dotty type `tp` into the backend's BType representation
+  *  (primitive, class reference, array or method-result type), registering
+  *  referenced classes as inner classes via `storage` where needed.
+  *  Works on `tp.widenDealias`, so aliases and singleton types are looked through.
+  */
+ def toTypeKind(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = {
+ import ct.bTypes._
+ val defn = ctx.definitions
+ import coreBTypes._
+ import Types._
+ /**
+ * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int.
+ * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType.
+ */
+ def primitiveOrClassToBType(sym: Symbol): BType = {
+ assert(sym.isClass, sym)
+ assert(sym != ArrayClass || isCompilingArray, sym)
+ primitiveTypeMap.getOrElse(sym.asInstanceOf[ct.bTypes.coreBTypes.bTypes.int.Symbol],
+ storage.getClassBTypeAndRegisterInnerClass(sym.asInstanceOf[ct.int.Symbol])).asInstanceOf[BType]
+ }
+
+ /**
+ * When compiling Array.scala, the type parameter T is not erased and shows up in method
+ * signatures, e.g. `def apply(i: Int): T`. A TypeRef to T is replaced by ObjectReference.
+ */
+ def nonClassTypeRefToBType(sym: Symbol): ClassBType = {
+ assert(sym.isType && isCompilingArray, sym)
+ ObjectReference.asInstanceOf[ct.bTypes.ClassBType]
+ }
+
+ tp.widenDealias match {
+ case JavaArrayType(el) =>ArrayBType(el.toTypeKind(ct)(storage)) // Array type such as Array[Int] (kept by erasure)
+ case t: TypeRef =>
+ // NOTE(review): the inner match on t.info has only a wildcard case, so it is
+ // currently a no-op wrapper around the symbol dispatch below.
+ t.info match {
+
+ case _ =>
+ if (!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType
+ else primitiveOrClassToBType(t.symbol) // Common reference to a type such as scala.Int or java.lang.String
+ }
+ case Types.ClassInfo(_, sym, _, _, _) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info)
+
+ case t: MethodType => // triggers for LabelDefs
+ t.resultType.toTypeKind(ct)(storage)
+
+ /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for
+ * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning.
+ * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala.
+ */
+ case a @ AnnotatedType(t, _) =>
+ debuglog(s"typeKind of annotated type $a")
+ t.toTypeKind(ct)(storage)
+
+ /* ExistentialType should (probably) be eliminated by erasure. We know they get here for
+ * classOf constants:
+ * class C[T]
+ * class T { final val k = classOf[C[_]] }
+ */
+ /* case e @ ExistentialType(_, t) =>
+ debuglog(s"typeKind of existential type $e")
+ t.toTypeKind(ctx)(storage)*/
+
+ /* The cases below should probably never occur. They are kept for now to avoid introducing
+ * new compiler crashes, but we added a warning. The compiler / library bootstrap and the
+ * test suite don't produce any warning.
+ */
+
+ case tp =>
+ ctx.warning(
+ s"an unexpected type representation reached the compiler backend while compiling $currentUnit: $tp. " +
+ "If possible, please file a bug on issues.scala-lang.org.")
+
+ tp match {
+ case tp: ThisType if tp.cls == ArrayClass => ObjectReference.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test
+ case tp: ThisType => storage.getClassBTypeAndRegisterInnerClass(tp.cls.asInstanceOf[ct.int.Symbol])
+ // case t: SingletonType => primitiveOrClassToBType(t.classSymbol)
+ case t: SingletonType => t.underlying.toTypeKind(ct)(storage)
+ case t: RefinedType => t.parent.toTypeKind(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b))
+ }
+ }
+ }
+
+ def summaryString: String = tp.showSummary
+
+ /** Parameter symbols of a method type; always empty in this backend (see below). */
+ def params: List[Symbol] =
+ Nil // backend uses this to emit annotations on parameter lists of forwarders
+ // to static methods of companion class
+ // in Dotty this link does not exist: there is no way to get from method type
+ // to inner symbols of DefDef
+ // todo: somehow handle.
+
+ def parents: List[Type] = tp.parents
+ }
+
+
+
+ object Assign extends AssignDeconstructor {
+ def _1: Tree = field.lhs
+ def _2: Tree = field.rhs
+ }
+
+ /** Deconstructor for selections. Unlike the other deconstructors this one also
+  *  accepts an Ident that desugars to a Select (via desugarIdent); `desugared`
+  *  caches the result of the last unapply, and a null value makes isEmpty true
+  *  so the pattern match fails.
+  */
+ object Select extends SelectDeconstructor {
+
+ // last successfully desugared selection; null means "no match"
+ var desugared: tpd.Select = null
+
+ override def isEmpty: Boolean =
+ desugared eq null
+
+ def _1: Tree = desugared.qualifier
+
+ def _2: Name = desugared.name
+
+ override def unapply(s: Select): this.type = {
+ s match {
+ case t: tpd.Select => desugared = t
+ case t: Ident =>
+ // an Ident may stand for a selection on a prefix; try to recover it
+ desugarIdent(t) match {
+ case Some(t) => desugared = t
+ case None => desugared = null
+ }
+ case _ => desugared = null
+ }
+
+ this
+ }
+ }
+
+ object Apply extends ApplyDeconstructor {
+ def _1: Tree = field.fun
+ def _2: List[Tree] = field.args
+ }
+
+ object If extends IfDeconstructor {
+ def _1: Tree = field.cond
+ def _2: Tree = field.thenp
+ def _3: Tree = field.elsep
+ }
+
+ object ValDef extends ValDefDeconstructor {
+ def _1: Modifiers = null
+ def _2: Name = field.name
+ def _3: Tree = field.tpt
+ def _4: Tree = field.rhs
+ }
+
+ object ApplyDynamic extends ApplyDynamicDeconstructor {
+ def _1: Tree = ???
+ def _2: List[Tree] = ???
+ }
+
+ // todo: these productN deconstructors should also eventually become name-based pattern matching
+ object Literal extends LiteralDeconstructor {
+ def get = field.const
+ }
+
+ /** Deconstructor for `throw` expressions. In dotty a throw is encoded as an
+  *  Apply of defn.throwMethod, so unapply matches only such applications and
+  *  `get` returns the thrown argument.
+  */
+ object Throw extends ThrowDeconstructor {
+ def get = field.args.head
+
+ override def unapply(s: Throw): DottyBackendInterface.this.Throw.type = {
+ if (s.fun.symbol eq defn.throwMethod) {
+ field = s
+ } else {
+ // not a throw; null field makes the match fail
+ field = null
+ }
+ this
+ }
+ }
+
+ object New extends NewDeconstructor {
+ def get = field.tpt.tpe
+ }
+
+ object This extends ThisDeconstructor {
+ def get = field.qual.name
+ def apply(s: Symbol): This = tpd.This(s.asClass)
+ }
+
+ object Return extends ReturnDeconstructor {
+ def get = field.expr
+ }
+
+ object Ident extends IdentDeconstructor {
+ def get = field.name
+ }
+
+ object Alternative extends AlternativeDeconstructor {
+ def get = field.trees
+ }
+
+ object Constant extends ConstantDeconstructor {
+ def get = field.value
+ }
+ object ThrownException extends ThrownException {
+ def unapply(a: Annotation): Option[Symbol] = None // todo
+ }
+
+ object Try extends TryDeconstructor {
+ def _1: Tree = field.expr
+ def _2: List[Tree] = field.cases
+ def _3: Tree = field.finalizer
+ }
+
+ object LabelDef extends LabelDeconstructor {
+ def _1: Name = field.name
+ def _2: List[Symbol] = field.vparamss.flatMap(_.map(_.symbol))
+ def _3: Tree = field.rhs
+
+ override def unapply(s: LabelDef): DottyBackendInterface.this.LabelDef.type = {
+ if (s.symbol is Flags.Label) this.field = s
+ else this.field = null
+ this
+ }
+ }
+
+ object Typed extends TypedDeconstrutor {
+ def _1: Tree = field.expr
+ def _2: Tree = field.tpt
+ }
+ object Super extends SuperDeconstructor {
+ def _1: Tree = field.qual
+ def _2: Name = field.mix.name
+ }
+ object ArrayValue extends ArrayValueDeconstructor {
+ def _1: Type = field.tpe match {
+ case JavaArrayType(elem) => elem
+ case _ =>
+ ctx.error(s"JavaSeqArray with type ${field.tpe} reached backend: $field", field.pos)
+ ErrorType
+ }
+ def _2: List[Tree] = field.elems
+ }
+ object Match extends MatchDeconstructor {
+ def _1: Tree = field.selector
+ def _2: List[Tree] = field.cases
+ }
+ object Block extends BlockDeconstructor {
+ def _1: List[Tree] = field.stats
+ def _2: Tree = field.expr
+ }
+ object TypeApply extends TypeApplyDeconstructor {
+ def _1: Tree = field.fun
+ def _2: List[Tree] = field.args
+ }
+ object CaseDef extends CaseDeconstructor {
+ def _1: Tree = field.pat
+ def _2: Tree = field.guard
+ def _3: Tree = field.body
+ }
+
+ object DefDef extends DefDefDeconstructor {
+ def _1: Modifiers = null
+ def _2: Name = field.name
+ def _3: List[TypeDef] = field.tparams
+ def _4: List[List[ValDef]] = field.vparamss
+ def _5: Tree = field.tpt
+ def _6: Tree = field.rhs
+ }
+
+ object ModuleDef extends ModuleDefDeconstructor {
+ def _1: Modifiers = ???
+ def _2: Name = ???
+ def _3: Tree = ???
+ }
+
+ object Template extends TemplateDeconstructor {
+ def _1: List[Tree] = field.parents
+ def _2: ValDef = field.self
+ def _3: List[Tree] = field.constr :: field.body
+ }
+
+ object Bind extends BindDeconstructor {
+ def _1: Name = field.name
+ def _2: Tree = field.body
+ }
+
+ object ClassDef extends ClassDefDeconstructor {
+ def _1: Modifiers = null
+ def _2: Name = field.name
+ def _4: Template = field.rhs.asInstanceOf[Template]
+ def _3: List[TypeDef] = Nil
+ }
+
+ /** Deconstructor for Closure nodes: captured environment (_1), target method (_2)
+  *  and the functional interface class to implement (_3). When the closure's tpt
+  *  carries no class symbol, the interface is derived from the method's arity and
+  *  result type using the scala-java8-compat JFunctionN/JProcedureN classes.
+  */
+ object Closure extends ClosureDeconstructor {
+ def _1 = field.env
+ def _2 = field.meth
+ def _3 = {
+ val t = field.tpt.tpe.typeSymbol
+ if (t.exists) t
+ else {
+ // arity of the SAM = method params minus captured environment entries
+ val arity = field.meth.tpe.widenDealias.paramTypes.size - _1.size
+ val returnsUnit = field.meth.tpe.widenDealias.resultType.classSymbol == UnitClass
+ // NOTE(review): requiredClass is given a TermName here; a class lookup would
+ // normally use a TypeName — confirm this resolves the intended class.
+ if (returnsUnit)
+ ctx.requiredClass(("scala.compat.java8.JProcedure" + arity).toTermName)
+ else ctx.requiredClass(("scala.compat.java8.JFunction" + arity).toTermName)
+ }
+ }
+ }
+
+ def currentUnit = ctx.compilationUnit
+}
diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
new file mode 100644
index 000000000..65dcb6c79
--- /dev/null
+++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -0,0 +1,433 @@
+package dotty.tools.backend.jvm
+
+import dotty.tools.dotc.CompilationUnit
+import dotty.tools.dotc.ast.Trees.{ValDef, PackageDef}
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.core.Names.TypeName
+
+import scala.collection.mutable
+import scala.tools.asm.{CustomAttr, ClassVisitor, MethodVisitor, FieldVisitor}
+import scala.tools.nsc.Settings
+import scala.tools.nsc.backend.jvm._
+import dotty.tools.dotc
+import dotty.tools.dotc.backend.jvm.DottyPrimitives
+import dotty.tools.dotc.transform.Erasure
+
+import dotty.tools.dotc.interfaces
+import java.util.Optional
+
+import scala.reflect.ClassTag
+import dotty.tools.dotc.core._
+import Periods._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Denotations._
+import Phases._
+import java.lang.AssertionError
+import java.io.{ File => JFile }
+import scala.tools.asm
+import scala.tools.asm.tree._
+import dotty.tools.dotc.util.{Positions, DotClass}
+import tpd._
+import StdNames._
+import scala.reflect.io.{Directory, PlainDirectory, AbstractFile}
+
+import scala.tools.nsc.backend.jvm.opt.LocalOpt
+
+/** Compiler phase that drives bytecode generation: it collects entry points and
+ *  super calls registered by earlier phases, then hands the compilation unit's
+ *  tree to a GenBCodePipeline backed by a DottyBackendInterface.
+ */
+class GenBCode extends Phase {
+ def phaseName: String = "genBCode"
+ // main-method entry points registered by earlier phases; consumed (and cleared) per run
+ private val entryPoints = new mutable.HashSet[Symbol]()
+ def registerEntryPoint(sym: Symbol) = entryPoints += sym
+
+ // for each class, the traits it makes super calls to (used to compute superInterfaces)
+ private val superCallsMap = new mutable.HashMap[Symbol, Set[ClassSymbol]]()
+ def registerSuperCall(sym: Symbol, calls: ClassSymbol) = {
+ val old = superCallsMap.getOrElse(sym, Set.empty)
+ superCallsMap.put(sym, old + calls)
+ }
+
+ // classfile output directory, taken from the -d compiler setting
+ def outputDir(implicit ctx: Context): AbstractFile =
+ new PlainDirectory(new Directory(new JFile(ctx.settings.d.value)))
+
+ def run(implicit ctx: Context): Unit = {
+ new GenBCodePipeline(entryPoints.toList,
+ new DottyBackendInterface(outputDir, superCallsMap.toMap)(ctx))(ctx).run(ctx.compilationUnit.tpdTree)
+ entryPoints.clear()
+ }
+}
+
+class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInterface)(implicit val ctx: Context) extends BCodeSyncAndTry{
+
+ var tree: Tree = _
+
+ val sourceFile = ctx.compilationUnit.source
+
+ /** Convert a `scala.reflect.io.AbstractFile` into a
+ * `dotty.tools.dotc.interfaces.AbstractFile`.
+ */
+ private[this] def convertAbstractFile(absfile: scala.reflect.io.AbstractFile): interfaces.AbstractFile =
+ new interfaces.AbstractFile {
+ override def name = absfile.name
+ override def path = absfile.path
+ override def jfile = Optional.ofNullable(absfile.file)
+ }
+
+ final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit)
+
+
+// class BCodePhase() {
+
+ private var bytecodeWriter : BytecodeWriter = null
+ private var mirrorCodeGen : JMirrorBuilder = null
+ private var beanInfoCodeGen : JBeanInfoBuilder = null
+
+ /* ---------------- q1 ---------------- */
+
+ case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) {
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+ private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit)
+ private val q1 = new java.util.LinkedList[Item1]
+
+ /* ---------------- q2 ---------------- */
+
+ case class Item2(arrivalPos: Int,
+ mirror: asm.tree.ClassNode,
+ plain: asm.tree.ClassNode,
+ bean: asm.tree.ClassNode,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+
+ private val poison2 = Item2(Int.MaxValue, null, null, null, null)
+ private val q2 = new _root_.java.util.LinkedList[Item2]
+
+ /* ---------------- q3 ---------------- */
+
+ /*
+ * An item of queue-3 (the last queue before serializing to disk) contains three of these
+ * (one for each of mirror, plain, and bean classes).
+ *
+ * @param jclassName internal name of the class
+ * @param jclassBytes bytecode emitted for the class SubItem3 represents
+ */
+ case class SubItem3(
+ jclassName: String,
+ jclassBytes: Array[Byte]
+ )
+
+ case class Item3(arrivalPos: Int,
+ mirror: SubItem3,
+ plain: SubItem3,
+ bean: SubItem3,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
+
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+ private val i3comparator = new java.util.Comparator[Item3] {
+ override def compare(a: Item3, b: Item3) = {
+ if (a.arrivalPos < b.arrivalPos) -1
+ else if (a.arrivalPos == b.arrivalPos) 0
+ else 1
+ }
+ }
+ private val poison3 = Item3(Int.MaxValue, null, null, null, null)
+ private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator)
+
+ /*
+ * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2
+ */
+ class Worker1(needsOutFolder: Boolean) {
+
+ val caseInsensitively = scala.collection.mutable.Map.empty[String, Symbol]
+
+ def run(): Unit = {
+ while (true) {
+ val item = q1.poll
+ if (item.isPoison) {
+ q2 add poison2
+ return
+ }
+ else {
+ try { /*withCurrentUnit(item.cunit)*/(visit(item)) }
+ catch {
+ case ex: Throwable =>
+ ex.printStackTrace()
+ ctx.error(s"Error while emitting ${int.sourceFileFor(item.cunit)}\n${ex.getMessage}")
+ }
+ }
+ }
+ }
+
+ /*
+ * Checks for duplicate internal names case-insensitively,
+ * builds ASM ClassNodes for mirror, plain, and bean classes;
+ * enqueues them in queue-2.
+ *
+ */
+ def visit(item: Item1) = {
+ val Item1(arrivalPos, cd, cunit) = item
+ val claszSymbol = cd.symbol
+
+ // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739
+ // todo: add back those checks
+ /*val lowercaseJavaClassName = claszSymbol.javaClassName.toLowerCase
+ caseInsensitively.get(lowercaseJavaClassName) match {
+ case None =>
+ caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
+ case Some(dupClassSym) =>
+ reporter.warning(
+ claszSymbol.pos,
+ s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
+ "Such classes will overwrite one another on case-insensitive filesystems."
+ )
+ }*/
+
+ // -------------- mirror class, if needed --------------
+ val mirrorC =
+ if (int.symHelper(claszSymbol).isTopLevelModuleClass) {
+ if (claszSymbol.companionClass == NoSymbol) {
+ mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
+ } else {
+ ctx.log(s"No mirror class for module with linked class: ${claszSymbol.fullName}")
+ null
+ }
+ } else null
+
+ // -------------- "plain" class --------------
+ val pcb = new PlainClassBuilder(cunit)
+ pcb.genPlainClass(cd)
+ val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null;
+ val plainC = pcb.cnode
+
+ if (claszSymbol.isClass) // @DarkDimius is this test needed here?
+ for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) {
+ val dataAttr = new CustomAttr(nme.TASTYATTR.toString, binary)
+ (if (mirrorC ne null) mirrorC else plainC).visitAttribute(dataAttr)
+ }
+
+ // -------------- bean info class, if needed --------------
+ val beanC =
+ if (claszSymbol hasAnnotation int.BeanInfoAttr) {
+ beanInfoCodeGen.genBeanInfoClass(
+ claszSymbol, cunit,
+ int.symHelper(claszSymbol).fieldSymbols,
+ int.symHelper(claszSymbol).methodSymbols
+ )
+ } else null
+
+ // ----------- hand over to pipeline-2
+
+ val item2 =
+ Item2(arrivalPos,
+ mirrorC, plainC, beanC,
+ outF)
+
+ q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done.
+
+ } // end of method visit(Item1)
+
+ } // end of class BCodePhase.Worker1
+
+ /*
+ * Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level:
+ *
+ * (a) no optimization involves:
+ * - converting the plain ClassNode to byte array and placing it on queue-3
+ */
+ class Worker2 {
+ lazy val localOpt = new LocalOpt(new Settings())
+
+ def localOptimizations(classNode: ClassNode): Unit = {
+ /*BackendStats.timed(BackendStats.methodOptTimer)*/(localOpt.methodOptimizations(classNode))
+ }
+
+ def run(): Unit = {
+ while (true) {
+ val item = q2.poll
+ if (item.isPoison) {
+ q3 add poison3
+ return
+ }
+ else {
+ try {
+ localOptimizations(item.plain)
+ addToQ3(item)
+ } catch {
+ case ex: Throwable =>
+ ex.printStackTrace()
+ ctx.error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}")
+ }
+ }
+ }
+ }
+
+ private def addToQ3(item: Item2) = {
+
+ def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = {
+ val cw = new CClassWriter(extraProc)
+ cn.accept(cw)
+ cw.toByteArray
+ }
+
+ val Item2(arrivalPos, mirror, plain, bean, outFolder) = item
+
+ val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror))
+ val plainC = SubItem3(plain.name, getByteArray(plain))
+ val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean))
+
+ if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) {
+ if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes)
+ AsmUtils.traceClass(plainC.jclassBytes)
+ if (beanC != null) AsmUtils.traceClass(beanC.jclassBytes)
+ }
+
+ q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder)
+
+ }
+
+ } // end of class BCodePhase.Worker2
+
+ var arrivalPos = 0
+
+ /*
+ * A run of the BCodePhase phase comprises:
+ *
+ * (a) set-up steps (most notably supporting maps in `BCodeTypes`,
+ * but also "the" writer where class files in byte-array form go)
+ *
+ * (b) building of ASM ClassNodes, their optimization and serialization.
+ *
+ * (c) tear down (closing the classfile-writer and clearing maps)
+ *
+ */
+ def run(t: Tree) = {
+ this.tree = t
+
+ // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer)
+
+ // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer)
+ arrivalPos = 0 // just in case
+ // scalaPrimitives.init()
+ bTypes.intializeCoreBTypes()
+ // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart)
+
+ // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated.
+ bytecodeWriter = initBytecodeWriter(entryPoints)
+ mirrorCodeGen = new JMirrorBuilder
+ beanInfoCodeGen = new JBeanInfoBuilder
+
+ val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+ buildAndSendToDisk(needsOutfileForSymbol)
+
+ // closing output files.
+ bytecodeWriter.close()
+ // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart)
+
+ if (ctx.compilerCallback != null)
+ ctx.compilerCallback.onSourceCompiled(sourceFile)
+
+ /* TODO Bytecode can be verified (now that all classfiles have been written to disk)
+ *
+ * (1) asm.util.CheckAdapter.verify()
+ * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
+ * passing a custom ClassLoader to verify inter-dependent classes.
+ * Alternatively,
+ * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool).
+ * - -Xverify:all
+ *
+ * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()`
+ *
+ */
+ }
+
+ /*
+ * Sequentially:
+ * (a) place all ClassDefs in queue-1
+ * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2
+ * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3
+ * (d) serialize to disk by draining queue-3.
+ */
+ private def buildAndSendToDisk(needsOutFolder: Boolean) = {
+
+ feedPipeline1()
+ // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat)
+ (new Worker1(needsOutFolder)).run()
+ // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart)
+
+ (new Worker2).run()
+
+ // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer)
+ drainQ3()
+ // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart)
+
+ }
+
+ /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */
+ private def feedPipeline1() = {
+ // walk the package structure, enqueueing every class definition in source order
+ def gen(tree: Tree): Unit = {
+ tree match {
+ case EmptyTree => ()
+ case PackageDef(_, stats) => stats foreach gen
+ case ValDef(name, tpt, rhs) => () // module val not emitted
+ // NOTE(review): any other tree shape would raise a MatchError here —
+ // presumably only the above occur at the top level after erasure; confirm.
+ case cd: TypeDef =>
+ q1 add Item1(arrivalPos, cd, int.currentUnit)
+ arrivalPos += 1
+ }
+ }
+ gen(tree)
+ // poison pill tells Worker1 there is no more input
+ q1 add poison1
+ }
+
+ /* Pipeline that writes classfile representations to disk. */
+ private def drainQ3() = {
+
+ // Write one classfile (mirror, plain or bean); cfr may be null when that
+ // variant was not generated. Also notifies the compiler and sbt callbacks.
+ def sendToDisk(cfr: SubItem3, outFolder: scala.tools.nsc.io.AbstractFile): Unit = {
+ if (cfr != null){
+ val SubItem3(jclassName, jclassBytes) = cfr
+ try {
+ val outFile =
+ if (outFolder == null) null
+ else getFileForClassfile(outFolder, jclassName, ".class")
+ bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
+
+ val className = jclassName.replace('/', '.')
+ if (ctx.compilerCallback != null)
+ ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), className)
+ if (ctx.sbtCallback != null)
+ ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className)
+ }
+ catch {
+ case e: FileConflictException =>
+ ctx.error(s"error writing $jclassName: ${e.getMessage}")
+ }
+ }
+ }
+
+ var moreComing = true
+ // `expected` denotes the arrivalPos whose Item3 should be serialized next
+ // NOTE(review): expected is incremented but never consulted — q3 is a priority
+ // queue ordered by arrivalPos, so ordering appears to come from the queue itself.
+ var expected = 0
+
+ while (moreComing) {
+ val incoming = q3.poll
+ moreComing = !incoming.isPoison
+ if (moreComing) {
+ val item = incoming
+ val outFolder = item.outFolder
+ sendToDisk(item.mirror, outFolder)
+ sendToDisk(item.plain, outFolder)
+ sendToDisk(item.bean, outFolder)
+ expected += 1
+ }
+ }
+
+ // we're done
+ assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1")
+ assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2")
+ assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3")
+
+ }
+ //} // end of class BCodePhase
+}
diff --git a/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala b/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala
new file mode 100644
index 000000000..371396e36
--- /dev/null
+++ b/compiler/src/dotty/tools/backend/jvm/LabelDefs.scala
@@ -0,0 +1,223 @@
+package dotty.tools.backend.jvm
+
+import dotty.tools.dotc.ast.Trees.Thicket
+import dotty.tools.dotc.ast.{Trees, tpd}
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Types
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransform, MiniPhase, MiniPhaseTransform}
+import dotty.tools.dotc
+import dotty.tools.dotc.backend.jvm.DottyPrimitives
+import dotty.tools.dotc.core.Flags.FlagSet
+import dotty.tools.dotc.transform.Erasure
+import dotty.tools.dotc.transform.SymUtils._
+import java.io.{File => JFile}
+
+import scala.collection.generic.Clearable
+import scala.collection.mutable
+import scala.collection.mutable.{ListBuffer, ArrayBuffer}
+import scala.reflect.ClassTag
+import scala.reflect.internal.util.WeakHashSet
+import scala.reflect.io.{Directory, PlainDirectory, AbstractFile}
+import scala.tools.asm.{ClassVisitor, FieldVisitor, MethodVisitor}
+import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface}
+import dotty.tools.dotc.core._
+import Periods._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Denotations._
+import Phases._
+import java.lang.AssertionError
+import dotty.tools.dotc.util.Positions.Position
+import Decorators._
+import tpd._
+import Flags._
+import StdNames.nme
+
+/**
+ * Verifies that each Label DefDef has only a single address to jump back and
+ * reorders them such that they are not nested and this address is a fall-through address for JVM
+ *
+ * e.g. such code
+ *
+ *
+ * <label> def foo(i: Int) = {
+ * <label> def bar = 0
+ * <label> def dough(i: Int) = if (i == 0) bar else foo(i-1)
+ * dough(i)
+ * }
+ *
+ * foo(100)
+ *
+ * will get rewritten to
+ *
+ * \
+ * <label> def foo(i: Int) = dough(i)
+ * <label> def dough(i: Int) = if (i == 0) bar else foo(i-1)
+ * <label> def bar = 0
+ * foo(100)
+ *
+ * Proposed way to generate this pattern in backend is:
+ *
+ * foo(100)
+ * <jump foo>
+ * <label> def foo(i: Int) = dough(i)
+ * // <jump a> // unreachable
+ * <label> def dough(i: Int) = if (i == 0) bar else foo(i-1)
+ * // <jump a> // unreachable
+ * <label> def bar = 0
+ * // <jump a> // unreachable
+ * <asm point a>
+ *
+ * Unreachable jumps will be eliminated by local dead code analysis.
+ * After JVM is smart enough to remove next-line jumps
+ *
+ * Note that Label DefDefs can be only nested in Block, otherwise no one would be able to call them
+ * Other DefDefs are eliminated
+ */
+class LabelDefs extends MiniPhaseTransform {
+ def phaseName: String = "labelDef"
+
+ val queue = new ArrayBuffer[Tree]()
+ val beingAppended = new mutable.HashSet[Symbol]()
+ var labelLevel = 0
+
+ override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ if (tree.symbol is Flags.Label) tree
+ else {
+ collectLabelDefs.clear
+ val newRhs = collectLabelDefs.transform(tree.rhs)
+ val labelCalls = collectLabelDefs.labelCalls
+ var entryPoints = collectLabelDefs.parentLabelCalls
+ var labelDefs = collectLabelDefs.labelDefs
+ var callCounts = collectLabelDefs.callCounts
+
+ // make sure that for every label there's a single location it should return and single entry point
+ // if theres already a location that it returns to that's a failure
+ val disallowed = new mutable.HashMap[Symbol, Tree]()
+ queue.sizeHint(labelCalls.size + entryPoints.size)
+
+ def putLabelDefsNearCallees = new TreeMap() {
+
+ override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = {
+ tree match {
+ case t: Apply if (entryPoints.contains(t)) =>
+ entryPoints = entryPoints - t
+ labelLevel = labelLevel + 1
+ val r = Block(moveLabels(t), t)
+ labelLevel = labelLevel - 1
+ if (labelLevel == 0) beingAppended.clear()
+ r
+ case _ => if (entryPoints.nonEmpty && labelDefs.nonEmpty) super.transform(tree) else tree
+ }
+
+ }
+ }
+
+ def moveLabels(entryPoint: Apply): List[Tree] = {
+ val entrySym = entryPoint.symbol
+ if ((entrySym is Flags.Label) && labelDefs.contains(entrySym)) {
+ val visitedNow = new mutable.HashMap[Symbol, Tree]()
+ val treesToAppend = new ArrayBuffer[Tree]() // order matters. parents should go first
+ treesToAppend += labelDefs(entrySym)
+ queue.clear()
+
+ var visited = 0
+ queue += entryPoint
+ while (visited < queue.size) {
+ val owningLabelDefSym = queue(visited).symbol
+ for (call <- labelCalls(owningLabelDefSym)) {
+ val callSym = call.symbol
+ if (!beingAppended.contains(callSym)) {
+ if (disallowed.contains(callSym)) {
+ val oldCall = disallowed(callSym)
+ ctx.error(s"Multiple return locations for Label $oldCall and $call", callSym.pos)
+ } else {
+ if ((!visitedNow.contains(callSym)) && labelDefs.contains(callSym)) {
+ val defTree = labelDefs(callSym)
+ visitedNow.put(callSym, defTree)
+ val callCount = callCounts(callSym)
+ if (callCount > 1) {
+ if (!treesToAppend.contains(defTree)) {
+ treesToAppend += defTree
+ queue += call
+
+ }
+ } else if (entryPoint.symbol ne callSym) entryPoints += call
+ }
+ }
+ }
+ }
+
+ visited += 1
+ }
+ beingAppended ++= treesToAppend.map(_.symbol)
+ treesToAppend.toList.map(putLabelDefsNearCallees.transform)
+ } else Nil
+ }
+
+
+ val res = cpy.DefDef(tree)(rhs = putLabelDefsNearCallees.transform(newRhs))
+
+ res
+ }
+ }
+
+ /** Tree walker that removes label DefDefs from the tree (recording them in
+  *  labelDefs so the caller can re-insert them near their call sites) and
+  *  collects, per label, the calls made to it and from it.
+  */
+ object collectLabelDefs extends TreeMap() {
+
+ // label calls from this DefDef
+ var parentLabelCalls: mutable.Set[Tree] = new mutable.HashSet[Tree]()
+ // number of calls to each label symbol from outside its own body
+ var callCounts: mutable.Map[Symbol, Int] = new mutable.HashMap[Symbol, Int]().withDefaultValue(0)
+
+ def shouldMoveLabel = true
+
+ // labelSymbol -> Defining tree
+ val labelDefs = new mutable.HashMap[Symbol, Tree]()
+ // owner -> all calls by this owner
+ val labelCalls = new mutable.HashMap[Symbol, mutable.Set[Tree]]()
+ // the label DefDef currently being traversed (null at the top level)
+ var owner: Symbol = null
+
+ // NOTE(review): clear does not reset callCounts or owner — confirm whether
+ // counts are intentionally carried over between transformDefDef invocations.
+ def clear = {
+ parentLabelCalls.clear()
+ labelDefs.clear()
+ labelCalls.clear()
+ }
+
+ override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
+ case t: Template => t
+ case t: Block =>
+ val r = super.transform(t)
+ r match {
+ // removing label defs may leave an empty block; collapse it to its expr
+ case t: Block if t.stats.isEmpty => t.expr
+ case _ => r
+ }
+ case t: DefDef =>
+ // only label DefDefs survive to this phase inside method bodies
+ assert(t.symbol is Flags.Label)
+
+ // save and re-scope the per-label state while traversing this label's body
+ val st = parentLabelCalls
+ parentLabelCalls = new mutable.HashSet[Tree]()
+ val symt = owner
+ owner = t.symbol
+
+ val r = super.transform(tree)
+
+ owner = symt
+ labelCalls(r.symbol) = parentLabelCalls
+ parentLabelCalls = st
+
+ if (shouldMoveLabel) {
+ // record the definition and drop it here; it is re-attached near a call site
+ labelDefs(r.symbol) = r
+ EmptyTree
+ } else r
+ case t: Apply if t.symbol is Flags.Label =>
+ val sym = t.symbol
+ parentLabelCalls = parentLabelCalls + t
+ // self-recursive calls do not count towards the duplication check
+ if (owner != sym) callCounts(sym) = callCounts(sym) + 1
+ super.transform(tree)
+ case _ =>
+ super.transform(tree)
+
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
new file mode 100644
index 000000000..0027defa7
--- /dev/null
+++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
@@ -0,0 +1,417 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools.dotc
+package backend.jvm
+
+import dotty.tools.backend.jvm.GenBCodePipeline
+import dotty.tools.dotc.ast.Trees.Select
+import dotty.tools.dotc.ast.tpd._
+import dotty.tools.dotc.core.Names.TermName
+import dotty.tools.dotc.core.StdNames
+import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.Types.{JavaArrayType, ErrorType, Type}
+
+import scala.collection.{ mutable, immutable }
+
+import core.Contexts.Context
+import core.Symbols.{Symbol, NoSymbol}
+
+/** Scala primitive operations are represented as methods in `Any` and
+ * `AnyVal` subclasses. Here we demultiplex them by providing a mapping
+ * from their symbols to integers. Different methods exist for
+ * different value types, but with the same meaning (like plus, minus,
+ * etc.). They will all be mapped to the same int.
+ *
+ * Note: The three equal methods have the following semantics:
+ * - `"=="` checks for `null`, and if non-null, calls
+ * `java.lang.Object.equals`
+ * `(class: Any; modifier: final)`. Primitive: `EQ`
+ * - `"eq"` usual reference comparison
+ * `(class: AnyRef; modifier: final)`. Primitive: `ID`
+ * - `"equals"` user-defined equality (Java semantics)
+ * `(class: Object; modifier: none)`. Primitive: `EQUALS`
+ *
+ * Inspired from the `scalac` compiler.
+ */
+class DottyPrimitives(ctx: Context) {
+  import scala.tools.nsc.backend.ScalaPrimitives._
+
+  // Symbol -> primitive-code table; built once, lazily, by `init`.
+  private lazy val primitives: immutable.Map[Symbol, Int] = init
+
+  /** Return the code for the given symbol. */
+  def getPrimitive(sym: Symbol): Int = {
+    primitives(sym)
+  }
+
+  /**
+   * Return the primitive code of the given operation. If the
+   * operation is an array get/set, we inspect the type of the receiver
+   * to demux the operation.
+   *
+   * @param app The application tree; its `fun` selects the method symbol
+   * @param tpe The type of the receiver object. It is used only for array
+   *            operations
+   */
+  def getPrimitive(app: Apply, tpe: Type)(implicit ctx: Context): Int = {
+    val fun = app.fun.symbol
+    val defn = ctx.definitions
+    val code = app.fun match {
+      case Select(_, nme.primitive.arrayLength) =>
+        LENGTH
+      case Select(_, nme.primitive.arrayUpdate) =>
+        UPDATE
+      case Select(_, nme.primitive.arrayApply) =>
+        APPLY
+      case _ => getPrimitive(fun)
+    }
+
+    // Element type of the array receiver, used below to pick the typed
+    // variant of a generic array primitive; errors on non-array receivers.
+    def elementType: Type = tpe.widenDealias match {
+      case defn.ArrayOf(el) => el
+      case JavaArrayType(el) => el
+      case _ =>
+        ctx.error(s"expected Array $tpe")
+        ErrorType
+    }
+
+    code match {
+
+      case APPLY =>
+        defn.scalaClassName(elementType) match {
+          case tpnme.Boolean => ZARRAY_GET
+          case tpnme.Byte => BARRAY_GET
+          case tpnme.Short => SARRAY_GET
+          case tpnme.Char => CARRAY_GET
+          case tpnme.Int => IARRAY_GET
+          case tpnme.Long => LARRAY_GET
+          case tpnme.Float => FARRAY_GET
+          case tpnme.Double => DARRAY_GET
+          case _ => OARRAY_GET
+        }
+
+      case UPDATE =>
+        defn.scalaClassName(elementType) match {
+          case tpnme.Boolean => ZARRAY_SET
+          case tpnme.Byte => BARRAY_SET
+          case tpnme.Short => SARRAY_SET
+          case tpnme.Char => CARRAY_SET
+          case tpnme.Int => IARRAY_SET
+          case tpnme.Long => LARRAY_SET
+          case tpnme.Float => FARRAY_SET
+          case tpnme.Double => DARRAY_SET
+          case _ => OARRAY_SET
+        }
+
+      case LENGTH =>
+        defn.scalaClassName(elementType) match {
+          case tpnme.Boolean => ZARRAY_LENGTH
+          case tpnme.Byte => BARRAY_LENGTH
+          case tpnme.Short => SARRAY_LENGTH
+          case tpnme.Char => CARRAY_LENGTH
+          case tpnme.Int => IARRAY_LENGTH
+          case tpnme.Long => LARRAY_LENGTH
+          case tpnme.Float => FARRAY_LENGTH
+          case tpnme.Double => DARRAY_LENGTH
+          case _ => OARRAY_LENGTH
+        }
+
+      case _ =>
+        code
+    }
+  }
+
+  /** Initialize the primitive map */
+  private def init: immutable.Map[Symbol, Int] = {
+
+    implicit val ctx: Context = this.ctx
+
+    import core.Symbols.defn
+    val primitives = new mutable.HashMap[Symbol, Int]()
+
+    /** Add a primitive operation to the map */
+    def addPrimitive(s: Symbol, code: Int): Unit = {
+      assert(!(primitives contains s), "Duplicate primitive " + s)
+      primitives(s) = code
+    }
+
+    /** Add all overloaded alternatives of `cls.method` under `code`.
+     *  `+` applied to a String argument is special-cased to CONCAT.
+     */
+    def addPrimitives(cls: Symbol, method: TermName, code: Int)(implicit ctx: Context): Unit = {
+      val alts = cls.info.member(method).alternatives.map(_.symbol)
+      if (alts.isEmpty)
+        ctx.error(s"Unknown primitive method $cls.$method")
+      else alts foreach (s =>
+        addPrimitive(s,
+          s.info.paramTypess match {
+            case List(tp :: _) if code == ADD && tp =:= ctx.definitions.StringType => CONCAT
+            case _ => code
+          }
+        )
+      )
+    }
+
+    // scala.Any
+    addPrimitive(defn.Any_==, EQ)
+    addPrimitive(defn.Any_!=, NE)
+    addPrimitive(defn.Any_isInstanceOf, IS)
+    addPrimitive(defn.Any_asInstanceOf, AS)
+    addPrimitive(defn.Any_##, HASH)
+
+    // java.lang.Object
+    addPrimitive(defn.Object_eq, ID)
+    addPrimitive(defn.Object_ne, NI)
+    /* addPrimitive(defn.Any_==, EQ)
+     addPrimitive(defn.Any_!=, NE)*/
+    addPrimitive(defn.Object_synchronized, SYNCHRONIZED)
+    /*addPrimitive(defn.Any_isInstanceOf, IS)
+     addPrimitive(defn.Any_asInstanceOf, AS)*/
+
+    // java.lang.String
+    addPrimitive(defn.String_+, CONCAT)
+
+    import core.StdNames.nme
+
+    // scala.Array
+    lazy val ArrayClass = defn.ArrayClass
+    addPrimitives(ArrayClass, nme.length, LENGTH)
+    addPrimitives(ArrayClass, nme.apply, APPLY)
+    addPrimitives(ArrayClass, nme.update, UPDATE)
+
+    // scala.Boolean
+    lazy val BooleanClass = defn.BooleanClass
+    addPrimitives(BooleanClass, nme.EQ, EQ)
+    addPrimitives(BooleanClass, nme.NE, NE)
+    addPrimitives(BooleanClass, nme.UNARY_!, ZNOT)
+    addPrimitives(BooleanClass, nme.ZOR, ZOR)
+    addPrimitives(BooleanClass, nme.ZAND, ZAND)
+    addPrimitives(BooleanClass, nme.OR, OR)
+    addPrimitives(BooleanClass, nme.AND, AND)
+    addPrimitives(BooleanClass, nme.XOR, XOR)
+
+    // scala.Byte
+    lazy val ByteClass = defn.ByteClass
+    addPrimitives(ByteClass, nme.EQ, EQ)
+    addPrimitives(ByteClass, nme.NE, NE)
+    addPrimitives(ByteClass, nme.ADD, ADD)
+    addPrimitives(ByteClass, nme.SUB, SUB)
+    addPrimitives(ByteClass, nme.MUL, MUL)
+    addPrimitives(ByteClass, nme.DIV, DIV)
+    addPrimitives(ByteClass, nme.MOD, MOD)
+    addPrimitives(ByteClass, nme.LT, LT)
+    addPrimitives(ByteClass, nme.LE, LE)
+    addPrimitives(ByteClass, nme.GT, GT)
+    addPrimitives(ByteClass, nme.GE, GE)
+    addPrimitives(ByteClass, nme.XOR, XOR)
+    addPrimitives(ByteClass, nme.OR, OR)
+    addPrimitives(ByteClass, nme.AND, AND)
+    addPrimitives(ByteClass, nme.LSL, LSL)
+    addPrimitives(ByteClass, nme.LSR, LSR)
+    addPrimitives(ByteClass, nme.ASR, ASR)
+    // conversions
+    addPrimitives(ByteClass, nme.toByte, B2B)
+    addPrimitives(ByteClass, nme.toShort, B2S)
+    addPrimitives(ByteClass, nme.toChar, B2C)
+    addPrimitives(ByteClass, nme.toInt, B2I)
+    addPrimitives(ByteClass, nme.toLong, B2L)
+    // unary methods
+    addPrimitives(ByteClass, nme.UNARY_+, POS)
+    addPrimitives(ByteClass, nme.UNARY_-, NEG)
+    addPrimitives(ByteClass, nme.UNARY_~, NOT)
+
+    addPrimitives(ByteClass, nme.toFloat, B2F)
+    addPrimitives(ByteClass, nme.toDouble, B2D)
+
+    // scala.Short
+    lazy val ShortClass = defn.ShortClass
+    addPrimitives(ShortClass, nme.EQ, EQ)
+    addPrimitives(ShortClass, nme.NE, NE)
+    addPrimitives(ShortClass, nme.ADD, ADD)
+    addPrimitives(ShortClass, nme.SUB, SUB)
+    addPrimitives(ShortClass, nme.MUL, MUL)
+    addPrimitives(ShortClass, nme.DIV, DIV)
+    addPrimitives(ShortClass, nme.MOD, MOD)
+    addPrimitives(ShortClass, nme.LT, LT)
+    addPrimitives(ShortClass, nme.LE, LE)
+    addPrimitives(ShortClass, nme.GT, GT)
+    addPrimitives(ShortClass, nme.GE, GE)
+    addPrimitives(ShortClass, nme.XOR, XOR)
+    addPrimitives(ShortClass, nme.OR, OR)
+    addPrimitives(ShortClass, nme.AND, AND)
+    addPrimitives(ShortClass, nme.LSL, LSL)
+    addPrimitives(ShortClass, nme.LSR, LSR)
+    addPrimitives(ShortClass, nme.ASR, ASR)
+    // conversions
+    addPrimitives(ShortClass, nme.toByte, S2B)
+    addPrimitives(ShortClass, nme.toShort, S2S)
+    addPrimitives(ShortClass, nme.toChar, S2C)
+    addPrimitives(ShortClass, nme.toInt, S2I)
+    addPrimitives(ShortClass, nme.toLong, S2L)
+    // unary methods
+    addPrimitives(ShortClass, nme.UNARY_+, POS)
+    addPrimitives(ShortClass, nme.UNARY_-, NEG)
+    addPrimitives(ShortClass, nme.UNARY_~, NOT)
+
+    addPrimitives(ShortClass, nme.toFloat, S2F)
+    addPrimitives(ShortClass, nme.toDouble, S2D)
+
+    // scala.Char
+    lazy val CharClass = defn.CharClass
+    addPrimitives(CharClass, nme.EQ, EQ)
+    addPrimitives(CharClass, nme.NE, NE)
+    addPrimitives(CharClass, nme.ADD, ADD)
+    addPrimitives(CharClass, nme.SUB, SUB)
+    addPrimitives(CharClass, nme.MUL, MUL)
+    addPrimitives(CharClass, nme.DIV, DIV)
+    addPrimitives(CharClass, nme.MOD, MOD)
+    addPrimitives(CharClass, nme.LT, LT)
+    addPrimitives(CharClass, nme.LE, LE)
+    addPrimitives(CharClass, nme.GT, GT)
+    addPrimitives(CharClass, nme.GE, GE)
+    addPrimitives(CharClass, nme.XOR, XOR)
+    addPrimitives(CharClass, nme.OR, OR)
+    addPrimitives(CharClass, nme.AND, AND)
+    addPrimitives(CharClass, nme.LSL, LSL)
+    addPrimitives(CharClass, nme.LSR, LSR)
+    addPrimitives(CharClass, nme.ASR, ASR)
+    // conversions
+    addPrimitives(CharClass, nme.toByte, C2B)
+    addPrimitives(CharClass, nme.toShort, C2S)
+    addPrimitives(CharClass, nme.toChar, C2C)
+    addPrimitives(CharClass, nme.toInt, C2I)
+    addPrimitives(CharClass, nme.toLong, C2L)
+    // unary methods
+    addPrimitives(CharClass, nme.UNARY_+, POS)
+    addPrimitives(CharClass, nme.UNARY_-, NEG)
+    addPrimitives(CharClass, nme.UNARY_~, NOT)
+    addPrimitives(CharClass, nme.toFloat, C2F)
+    addPrimitives(CharClass, nme.toDouble, C2D)
+
+    // scala.Int
+    lazy val IntClass = defn.IntClass
+    addPrimitives(IntClass, nme.EQ, EQ)
+    addPrimitives(IntClass, nme.NE, NE)
+    addPrimitives(IntClass, nme.ADD, ADD)
+    addPrimitives(IntClass, nme.SUB, SUB)
+    addPrimitives(IntClass, nme.MUL, MUL)
+    addPrimitives(IntClass, nme.DIV, DIV)
+    addPrimitives(IntClass, nme.MOD, MOD)
+    addPrimitives(IntClass, nme.LT, LT)
+    addPrimitives(IntClass, nme.LE, LE)
+    addPrimitives(IntClass, nme.GT, GT)
+    addPrimitives(IntClass, nme.GE, GE)
+    addPrimitives(IntClass, nme.XOR, XOR)
+    addPrimitives(IntClass, nme.OR, OR)
+    addPrimitives(IntClass, nme.AND, AND)
+    addPrimitives(IntClass, nme.LSL, LSL)
+    addPrimitives(IntClass, nme.LSR, LSR)
+    addPrimitives(IntClass, nme.ASR, ASR)
+    // conversions
+    addPrimitives(IntClass, nme.toByte, I2B)
+    addPrimitives(IntClass, nme.toShort, I2S)
+    addPrimitives(IntClass, nme.toChar, I2C)
+    addPrimitives(IntClass, nme.toInt, I2I)
+    addPrimitives(IntClass, nme.toLong, I2L)
+    // unary methods
+    addPrimitives(IntClass, nme.UNARY_+, POS)
+    addPrimitives(IntClass, nme.UNARY_-, NEG)
+    addPrimitives(IntClass, nme.UNARY_~, NOT)
+    addPrimitives(IntClass, nme.toFloat, I2F)
+    addPrimitives(IntClass, nme.toDouble, I2D)
+
+    // scala.Long
+    lazy val LongClass = defn.LongClass
+    addPrimitives(LongClass, nme.EQ, EQ)
+    addPrimitives(LongClass, nme.NE, NE)
+    addPrimitives(LongClass, nme.ADD, ADD)
+    addPrimitives(LongClass, nme.SUB, SUB)
+    addPrimitives(LongClass, nme.MUL, MUL)
+    addPrimitives(LongClass, nme.DIV, DIV)
+    addPrimitives(LongClass, nme.MOD, MOD)
+    addPrimitives(LongClass, nme.LT, LT)
+    addPrimitives(LongClass, nme.LE, LE)
+    addPrimitives(LongClass, nme.GT, GT)
+    addPrimitives(LongClass, nme.GE, GE)
+    addPrimitives(LongClass, nme.XOR, XOR)
+    addPrimitives(LongClass, nme.OR, OR)
+    addPrimitives(LongClass, nme.AND, AND)
+    addPrimitives(LongClass, nme.LSL, LSL)
+    addPrimitives(LongClass, nme.LSR, LSR)
+    addPrimitives(LongClass, nme.ASR, ASR)
+    // conversions
+    addPrimitives(LongClass, nme.toByte, L2B)
+    addPrimitives(LongClass, nme.toShort, L2S)
+    addPrimitives(LongClass, nme.toChar, L2C)
+    addPrimitives(LongClass, nme.toInt, L2I)
+    addPrimitives(LongClass, nme.toLong, L2L)
+    // unary methods
+    addPrimitives(LongClass, nme.UNARY_+, POS)
+    addPrimitives(LongClass, nme.UNARY_-, NEG)
+    addPrimitives(LongClass, nme.UNARY_~, NOT)
+    addPrimitives(LongClass, nme.toFloat, L2F)
+    addPrimitives(LongClass, nme.toDouble, L2D)
+
+    // scala.Float
+    lazy val FloatClass = defn.FloatClass
+    addPrimitives(FloatClass, nme.EQ, EQ)
+    addPrimitives(FloatClass, nme.NE, NE)
+    addPrimitives(FloatClass, nme.ADD, ADD)
+    addPrimitives(FloatClass, nme.SUB, SUB)
+    addPrimitives(FloatClass, nme.MUL, MUL)
+    addPrimitives(FloatClass, nme.DIV, DIV)
+    addPrimitives(FloatClass, nme.MOD, MOD)
+    addPrimitives(FloatClass, nme.LT, LT)
+    addPrimitives(FloatClass, nme.LE, LE)
+    addPrimitives(FloatClass, nme.GT, GT)
+    addPrimitives(FloatClass, nme.GE, GE)
+    // conversions
+    addPrimitives(FloatClass, nme.toByte, F2B)
+    addPrimitives(FloatClass, nme.toShort, F2S)
+    addPrimitives(FloatClass, nme.toChar, F2C)
+    addPrimitives(FloatClass, nme.toInt, F2I)
+    addPrimitives(FloatClass, nme.toLong, F2L)
+    addPrimitives(FloatClass, nme.toFloat, F2F)
+    addPrimitives(FloatClass, nme.toDouble, F2D)
+    // unary methods
+    addPrimitives(FloatClass, nme.UNARY_+, POS)
+    addPrimitives(FloatClass, nme.UNARY_-, NEG)
+
+    // scala.Double
+    lazy val DoubleClass = defn.DoubleClass
+    addPrimitives(DoubleClass, nme.EQ, EQ)
+    addPrimitives(DoubleClass, nme.NE, NE)
+    addPrimitives(DoubleClass, nme.ADD, ADD)
+    addPrimitives(DoubleClass, nme.SUB, SUB)
+    addPrimitives(DoubleClass, nme.MUL, MUL)
+    addPrimitives(DoubleClass, nme.DIV, DIV)
+    addPrimitives(DoubleClass, nme.MOD, MOD)
+    addPrimitives(DoubleClass, nme.LT, LT)
+    addPrimitives(DoubleClass, nme.LE, LE)
+    addPrimitives(DoubleClass, nme.GT, GT)
+    addPrimitives(DoubleClass, nme.GE, GE)
+    // conversions
+    addPrimitives(DoubleClass, nme.toByte, D2B)
+    addPrimitives(DoubleClass, nme.toShort, D2S)
+    addPrimitives(DoubleClass, nme.toChar, D2C)
+    addPrimitives(DoubleClass, nme.toInt, D2I)
+    addPrimitives(DoubleClass, nme.toLong, D2L)
+    addPrimitives(DoubleClass, nme.toFloat, D2F)
+    addPrimitives(DoubleClass, nme.toDouble, D2D)
+    // unary methods
+    addPrimitives(DoubleClass, nme.UNARY_+, POS)
+    addPrimitives(DoubleClass, nme.UNARY_-, NEG)
+
+
+    primitives.toMap
+  }
+
+  /** Does `fun` denote a primitive operation? */
+  def isPrimitive(fun: Tree): Boolean = {
+    (primitives contains fun.symbol(ctx)) ||
+    (fun.symbol(ctx) == NoSymbol // the only trees that do not have a symbol assigned are array.{update,select,length,clone}
+      && (fun match {
+        case Select(_, StdNames.nme.clone_) => false // but array.clone is NOT a primitive op.
+        case _ => true
+      }))
+  }
+
+}
+
diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala
new file mode 100644
index 000000000..56b6dabbe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/Bench.scala
@@ -0,0 +1,46 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools
+package dotc
+
+import core.Contexts.Context
+import reporting.Reporter
+
+/** A main class for running compiler benchmarks. Can instantiate a given
+ * number of compilers and run each (sequentially) a given number of times
+ * on the same sources.
+ */
+object Bench extends Driver {
+
+  // Number of compilation runs per compiler instance; set from the "#runs" argument.
+  @sharable private var numRuns = 1
+
+  def newCompiler(implicit ctx: Context): Compiler = new Compiler
+
+  /** Run `op` `n` times, returning the reporter of the last run
+   *  (or `emptyReporter` when `n == 0`).
+   */
+  private def ntimes(n: Int)(op: => Reporter): Reporter =
+    (emptyReporter /: (0 until n)) ((_, _) => op)
+
+  /** Compile the given sources `numRuns` times, printing the wall-clock
+   *  time of each run.
+   */
+  override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter =
+    ntimes(numRuns) {
+      val start = System.nanoTime()
+      val r = super.doCompile(compiler, fileNames)
+      println(s"time elapsed: ${(System.nanoTime - start) / 1000000}ms")
+      r
+    }
+
+  /** Extract the integer value following the argument `name` in `args`.
+   *
+   *  @return the value (or `default` if `name` is absent), paired with
+   *          the argument array with `name` and its value removed.
+   */
+  def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = {
+    val pos = args indexOf name
+    if (pos < 0) (default, args)
+    else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2)))
+  }
+
+  /** Parse "#compilers" and "#runs" from the command line, then run the
+   *  remaining arguments through `#compilers` fresh compiler instances.
+   */
+  override def process(args: Array[String], rootCtx: Context): Reporter = {
+    val (numCompilers, args1) = extractNumArg(args, "#compilers")
+    val (numRuns, args2) = extractNumArg(args1, "#runs")
+    this.numRuns = numRuns
+    ntimes(numCompilers)(super.process(args2, rootCtx))
+  }
+}
+
+
diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
new file mode 100644
index 000000000..491c2bd9b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -0,0 +1,24 @@
+package dotty.tools
+package dotc
+
+import dotty.tools.dotc.core.Types.Type
+import dotty.tools.dotc.core.tasty.{TastyUnpickler, TastyBuffer, TastyPickler}
+import util.SourceFile
+import ast.{tpd, untpd}
+import dotty.tools.dotc.core.Symbols._
+
+/** State carried through the compiler pipeline for one source file. */
+class CompilationUnit(val source: SourceFile) {
+
+  override def toString = source.toString
+
+  // Tree as produced by the parser, before typing.
+  var untpdTree: untpd.Tree = untpd.EmptyTree
+
+  // Typed tree; rewritten in place by subsequent phases.
+  var tpdTree: tpd.Tree = tpd.EmptyTree
+
+  // A unit counts as Java source purely based on its file extension.
+  def isJava = source.file.name.endsWith(".java")
+
+  /** Pickled TASTY binaries, indexed by class. */
+  var pickled: Map[ClassSymbol, Array[Byte]] = Map()
+
+  /** TASTY unpicklers, indexed by class; populated when this unit is
+   *  read back from TASTY (see FromTasty.ReadTastyTreesFromClasses).
+   */
+  var unpicklers: Map[ClassSymbol, TastyUnpickler] = Map()
+}
diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala
new file mode 100644
index 000000000..ad3249be2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/Compiler.scala
@@ -0,0 +1,145 @@
+package dotty.tools
+package dotc
+
+import core._
+import Contexts._
+import Periods._
+import Symbols._
+import Types._
+import Scopes._
+import typer.{FrontEnd, Typer, ImportInfo, RefChecks}
+import reporting.{Reporter, ConsoleReporter}
+import Phases.Phase
+import transform._
+import util.FreshNameCreator
+import transform.TreeTransforms.{TreeTransform, TreeTransformer}
+import core.DenotTransformers.DenotTransformer
+import core.Denotations.SingleDenotation
+
+import dotty.tools.backend.jvm.{LabelDefs, GenBCode, CollectSuperCalls}
+
+/** The central class of the dotc compiler. The job of a compiler is to create
+ * runs, which process given `phases` in a given `rootContext`.
+ */
+class Compiler {
+
+  /** Meta-ordering constraint:
+   *
+   *  DenotTransformers that change the signature of their denotation's info must go
+   *  after erasure. The reason is that denotations are permanently referred to by
+   *  TermRefs which contain a signature. If the signature of a symbol would change,
+   *  all refs to it would become outdated - they could not be dereferenced in the
+   *  new phase.
+   *
+   *  After erasure, signature changing denot-transformers are OK because erasure
+   *  will make sure that only term refs with fixed SymDenotations survive beyond it. This
+   *  is possible because:
+   *
+   *   - splitter has run, so every ident or select refers to a unique symbol
+   *   - after erasure, asSeenFrom is the identity, so every reference has a
+   *     plain SymDenotation, as opposed to a UniqueRefDenotation.
+   */
+  def phases: List[List[Phase]] =
+    List(
+      List(new FrontEnd),           // Compiler frontend: scanner, parser, namer, typer
+      List(new sbt.ExtractDependencies), // Sends information on classes' dependencies to sbt via callbacks
+      List(new PostTyper),          // Additional checks and cleanups after type checking
+      List(new sbt.ExtractAPI),     // Sends a representation of the API of classes to sbt via callbacks
+      List(new Pickler),            // Generate TASTY info
+      List(new FirstTransform,      // Some transformations to put trees into a canonical form
+           new CheckReentrant),     // Internal use only: Check that compiled program has no data races involving global vars
+      List(new RefChecks,           // Various checks mostly related to abstract members and overriding
+           new CheckStatic,         // Check restrictions that apply to @static members
+           new ElimRepeated,        // Rewrite vararg parameters and arguments
+           new NormalizeFlags,      // Rewrite some definition flags
+           new ExtensionMethods,    // Expand methods of value classes with extension methods
+           new ExpandSAMs,          // Expand single abstract method closures to anonymous classes
+           new TailRec,             // Rewrite tail recursion to loops
+           new LiftTry,             // Put try expressions that might execute on non-empty stacks into their own methods
+           new ClassOf),            // Expand `Predef.classOf` calls.
+      List(new TryCatchPatterns,    // Compile cases in try/catch
+           new PatternMatcher,      // Compile pattern matches
+           new ExplicitOuter,       // Add accessors to outer classes from nested ones.
+           new ExplicitSelf,        // Make references to non-trivial self types explicit as casts
+           new CrossCastAnd,        // Normalize selections involving intersection types.
+           new Splitter),           // Expand selections involving union types into conditionals
+      List(new VCInlineMethods,     // Inlines calls to value class methods
+           new IsInstanceOfEvaluator, // Issues warnings when unreachable statements are present in match/if expressions
+           new SeqLiterals,         // Express vararg arguments as arrays
+           new InterceptedMethods,  // Special handling of `==`, `|=`, `getClass` methods
+           new Getters,             // Replace non-private vals and vars with getter defs (fields are added later)
+           new ElimByName,          // Expand by-name parameters and arguments
+           new AugmentScala2Traits, // Expand traits defined in Scala 2.11 to simulate old-style rewritings
+           new ResolveSuper,        // Implement super accessors and add forwarders to trait methods
+           new ArrayConstructors),  // Intercept creation of (non-generic) arrays and intrinsify.
+      List(new Erasure),            // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
+      List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
+           new VCElideAllocations,  // Peep-hole optimization to eliminate unnecessary value class allocations
+           new Mixin,               // Expand trait fields and trait initializers
+           new LazyVals,            // Expand lazy vals
+           new Memoize,             // Add private fields to getters and setters
+           new LinkScala2ImplClasses, // Forward calls to the implementation classes of traits defined by Scala 2.11
+           new NonLocalReturns,     // Expand non-local returns
+           new CapturedVars,        // Represent vars captured by closures as heap objects
+           new Constructors,        // Collect initialization code in primary constructors
+                                    // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it
+           new FunctionalInterfaces, // Rewrites closures to implement @specialized types of Functions.
+           new GetClass),           // Rewrites getClass calls on primitive types.
+      List(new LambdaLift,          // Lifts out nested functions to class scope, storing free variables in environments
+                                    // Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here
+           new ElimStaticThis,      // Replace `this` references to static objects by global identifiers
+           new Flatten,             // Lift all inner classes to package scope
+           new RestoreScopes),      // Repair scopes rendered invalid by moving definitions in prior phases of the group
+      List(new ExpandPrivate,       // Widen private definitions accessed from nested classes
+           new SelectStatic,        // get rid of selects that would be compiled into GetStatic
+           new CollectEntryPoints,  // Find classes with main methods
+           new CollectSuperCalls,   // Find classes that are called with super
+           new DropInlined,         // Drop Inlined nodes, since backend has no use for them
+           new MoveStatics,         // Move static methods to companion classes
+           new LabelDefs),          // Converts calls to labels to jumps
+      List(new GenBCode)            // Generate JVM bytecode
+    )
+
+  // Run counter; `nextRunId` increments it and returns the new value.
+  var runId = 1
+  def nextRunId = {
+    runId += 1; runId
+  }
+
+  /** Produces the following contexts, from outermost to innermost
+   *
+   *  bootStrap: A context with next available runId and a scope consisting of
+   *             the RootPackage _root_
+   *  start      A context with RootClass as owner and the necessary initializations
+   *             for type checking.
+   *  imports    For each element of RootImports, an import context
+   */
+  def rootContext(implicit ctx: Context): Context = {
+    ctx.initialize()(ctx)
+    ctx.setPhasePlan(phases)
+    val rootScope = new MutableScope
+    val bootstrap = ctx.fresh
+      .setPeriod(Period(nextRunId, FirstPhaseId))
+      .setScope(rootScope)
+    rootScope.enter(ctx.definitions.RootPackage)(bootstrap)
+    val start = bootstrap.fresh
+      .setOwner(defn.RootClass)
+      .setTyper(new Typer)
+      .setMode(Mode.ImplicitsEnabled)
+      .setTyperState(new MutableTyperState(ctx.typerState, ctx.typerState.reporter, isCommittable = true))
+      .setFreshNames(new FreshNameCreator.Default)
+    ctx.initialize()(start) // re-initialize the base context with start
+    def addImport(ctx: Context, refFn: () => TermRef) =
+      ctx.fresh.setImportInfo(ImportInfo.rootImport(refFn)(ctx))
+    (start.setRunInfo(new RunInfo(start)) /: defn.RootImportFns)(addImport)
+  }
+
+  /** Reset the context base and run info so a subsequent run starts clean. */
+  def reset()(implicit ctx: Context): Unit = {
+    ctx.base.reset()
+    ctx.runInfo.clear()
+  }
+
+  /** Create a new run, after resetting compiler state. */
+  def newRun(implicit ctx: Context): Run = {
+    reset()
+    new Run(this)(rootContext)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala
new file mode 100644
index 000000000..f54a23ad2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/Driver.scala
@@ -0,0 +1,134 @@
+package dotty.tools.dotc
+
+import dotty.tools.FatalError
+import config.CompilerCommand
+import core.Contexts.{Context, ContextBase}
+import util.DotClass
+import reporting._
+import scala.util.control.NonFatal
+
+/** Run the Dotty compiler.
+ *
+ * Extending this class lets you customize many aspect of the compilation
+ * process, but in most cases you only need to call [[process]] on the
+ * existing object [[Main]].
+ */
+abstract class Driver extends DotClass {
+
+  /** Factory for the compiler instance used by `doCompile`. */
+  protected def newCompiler(implicit ctx: Context): Compiler
+
+  /** A reporter that only accumulates messages; returned when no compilation runs. */
+  protected def emptyReporter: Reporter = new StoreReporter(null)
+
+  /** Compile `fileNames` in a fresh run of `compiler`. A `FatalError` is
+   *  turned into an error report instead of crashing the driver.
+   */
+  protected def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter =
+    if (fileNames.nonEmpty)
+      try {
+        val run = compiler.newRun
+        run.compile(fileNames)
+        run.printSummary()
+      }
+      catch {
+        case ex: FatalError =>
+          ctx.error(ex.getMessage) // signals that we should fail compilation.
+          ctx.reporter
+      }
+    else ctx.reporter
+
+  /** The initial context from which each `process` invocation starts. */
+  protected def initCtx = (new ContextBase).initialCtx
+
+  // Whether the command line must list source files (see CompilerCommand.checkUsage).
+  protected def sourcesRequired = true
+
+  /** Distill the command-line `args` into compiler settings on a fresh
+   *  context, returning the source file names to compile and that context.
+   */
+  def setup(args: Array[String], rootCtx: Context): (List[String], Context) = {
+    val ctx = rootCtx.fresh
+    val summary = CompilerCommand.distill(args)(ctx)
+    ctx.setSettings(summary.sstate)
+    val fileNames = CompilerCommand.checkUsage(summary, sourcesRequired)(ctx)
+    (fileNames, ctx)
+  }
+
+  /** Entry point to the compiler that can be conveniently used with Java reflection.
+   *
+   *  This entry point can easily be used without depending on the `dotty` package,
+   *  you only need to depend on `dotty-interfaces` and call this method using
+   *  reflection. This allows you to write code that will work against multiple
+   *  versions of dotty without recompilation.
+   *
+   *  The trade-off is that you can only pass a SimpleReporter to this method
+   *  and not a normal Reporter which is more powerful.
+   *
+   *  Usage example: [[https://github.com/lampepfl/dotty/tree/master/test/test/InterfaceEntryPointTest.scala]]
+   *
+   *  @param args       Arguments to pass to the compiler.
+   *  @param simple     Used to log errors, warnings, and info messages.
+   *                    The default reporter is used if this is `null`.
+   *  @param callback   Used to execute custom code during the compilation
+   *                    process. No callbacks will be executed if this is `null`.
+   *  @return
+   */
+  final def process(args: Array[String], simple: interfaces.SimpleReporter,
+    callback: interfaces.CompilerCallback): interfaces.ReporterResult = {
+    val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple)
+    process(args, reporter, callback)
+  }
+
+  /** Principal entry point to the compiler.
+   *
+   *  Usage example: [[https://github.com/lampepfl/dotty/tree/master/test/test/OtherEntryPointsTest.scala]]
+   *  in method `runCompiler`
+   *
+   *  @param args       Arguments to pass to the compiler.
+   *  @param reporter   Used to log errors, warnings, and info messages.
+   *                    The default reporter is used if this is `null`.
+   *  @param callback   Used to execute custom code during the compilation
+   *                    process. No callbacks will be executed if this is `null`.
+   *  @return The `Reporter` used. Use `Reporter#hasErrors` to check
+   *          if compilation succeeded.
+   */
+  final def process(args: Array[String], reporter: Reporter = null,
+    callback: interfaces.CompilerCallback = null): Reporter = {
+    val ctx = initCtx.fresh
+    if (reporter != null)
+      ctx.setReporter(reporter)
+    if (callback != null)
+      ctx.setCompilerCallback(callback)
+    process(args, ctx)
+  }
+
+  /** Entry point to the compiler with no optional arguments.
+   *
+   *  This overload is provided for compatibility reasons: the
+   *  `RawCompiler` of sbt expects this method to exist and calls
+   *  it using reflection. Keeping it means that we can change
+   *  the other overloads without worrying about breaking compatibility
+   *  with sbt.
+   */
+  final def process(args: Array[String]): Reporter =
+    process(args, null: Reporter, null: interfaces.CompilerCallback)
+
+  /** Entry point to the compiler using a custom `Context`.
+   *
+   *  In most cases, you do not need a custom `Context` and should
+   *  instead use one of the other overloads of `process`. However,
+   *  the other overloads cannot be overridden, instead you
+   *  should override this one which they call internally.
+   *
+   *  Usage example: [[https://github.com/lampepfl/dotty/tree/master/test/test/OtherEntryPointsTest.scala]]
+   *  in method `runCompilerWithContext`
+   *
+   *  @param args       Arguments to pass to the compiler.
+   *  @param rootCtx    The root Context to use.
+   *  @return The `Reporter` used. Use `Reporter#hasErrors` to check
+   *          if compilation succeeded.
+   */
+  def process(args: Array[String], rootCtx: Context): Reporter = {
+    val (fileNames, ctx) = setup(args, rootCtx)
+    doCompile(newCompiler(ctx), fileNames)(ctx)
+  }
+
+  def main(args: Array[String]): Unit = {
+    // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError,
+    // we may try to load it but fail with another StackOverflowError and lose the original exception,
+    // see <https://groups.google.com/forum/#!topic/scala-user/kte6nak-zPM>.
+    val _ = NonFatal
+    sys.exit(if (process(args).hasErrors) 1 else 0)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/FromTasty.scala b/compiler/src/dotty/tools/dotc/FromTasty.scala
new file mode 100644
index 000000000..b060a2054
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/FromTasty.scala
@@ -0,0 +1,107 @@
+/* dotc
+ * Copyright 2005-2015 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools
+package dotc
+
+import core._
+import Contexts._
+import Symbols._
+import SymDenotations._
+import typer.FrontEnd
+import Phases.Phase
+import util._
+import reporting.Reporter
+import Decorators._
+import dotty.tools.dotc.transform.Pickler
+import tasty.DottyUnpickler
+import ast.tpd._
+
+/** Compiler for TASTY files.
+ * Usage:
+ *
+ * scala dotty.tools.dotc.FromTasty (option | classname)*
+ *
+ * Options are as for dotc.
+ * Classnames are fully qualified names of top-level classes that need to have a TASTY attribute.
+ * Example:
+ *
+ * scala dotty.tools.dotc.FromTasty -Xprint:front extMethods.T
+ */
+object FromTasty extends Driver {
+  override def newCompiler(implicit ctx: Context): Compiler = new TASTYCompiler
+
+  /** A compiler whose frontend reads trees from TASTY attributes of class
+   *  files instead of parsing source code.
+   */
+  class TASTYCompiler extends Compiler {
+
+    override def phases: List[List[Phase]] = {
+      // Keep only the phase groups that come after the Pickler group
+      // (`dropWhile` stops at the Pickler group, `.tail` drops it too),
+      // and put a TASTY-reading frontend in front of them.
+      val backendPhases = super.phases.dropWhile {
+        case List(_: Pickler) => false
+        case _ => true
+      }.tail
+      List(new ReadTastyTreesFromClasses) :: backendPhases
+    }
+
+    override def newRun(implicit ctx: Context): Run = {
+      reset()
+      new TASTYRun(this)(rootContext)
+    }
+  }
+
+  /** A run whose "file names" are fully qualified class names. */
+  class TASTYRun(comp: Compiler)(implicit ctx: Context) extends Run(comp) {
+    override def compile(classNames: List[String]) = {
+      units = classNames.map(new TASTYCompilationUnit(_))
+      compileUnits()
+    }
+  }
+
+  /** A compilation unit standing for the class `className`; has no source file. */
+  class TASTYCompilationUnit(val className: String) extends CompilationUnit(NoSource) {
+    override def toString = s"class file $className"
+  }
+
+  // Walks the entire tree after unpickling; presumably to force any lazily
+  // loaded tree parts eagerly -- TODO confirm against DottyUnpickler.
+  object force extends TreeTraverser {
+    def traverse(tree: Tree)(implicit ctx: Context): Unit = traverseChildren(tree)
+  }
+
+  /** Frontend replacement: for each unit, resolve its class name and
+   *  unpickle the class's TASTY attribute into a typed tree.
+   */
+  class ReadTastyTreesFromClasses extends FrontEnd {
+
+    override def isTyper = false
+
+    override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] =
+      units.map(readTASTY)
+
+    /** Replace a `TASTYCompilationUnit` by a normal unit holding the
+     *  unpickled tree; errors (and returns the unit unchanged) when the
+     *  class cannot be found or carries no TASTY attribute.
+     */
+    def readTASTY(unit: CompilationUnit)(implicit ctx: Context): CompilationUnit = unit match {
+      case unit: TASTYCompilationUnit =>
+        val className = unit.className.toTypeName
+        // Names without a '.' are looked up in the empty package instead of via staticRef.
+        val clsd =
+          if (className.contains('.')) ctx.base.staticRef(className)
+          else defn.EmptyPackageClass.info.decl(className)
+        def cannotUnpickle(reason: String) = {
+          ctx.error(s"class $className cannot be unpickled because $reason")
+          unit
+        }
+        clsd match {
+          case clsd: ClassDenotation =>
+            clsd.infoOrCompleter match {
+              case info: ClassfileLoader =>
+                info.load(clsd) match {
+                  case Some(unpickler: DottyUnpickler) =>
+                    val List(unpickled) = unpickler.body(ctx.addMode(Mode.ReadPositions))
+                    val unit1 = new CompilationUnit(new SourceFile(clsd.symbol.sourceFile, Seq()))
+                    unit1.tpdTree = unpickled
+                    unit1.unpicklers += (clsd.classSymbol -> unpickler.unpickler)
+                    force.traverse(unit1.tpdTree)
+                    unit1
+                  case _ =>
+                    cannotUnpickle(s"its class file ${info.classfile} does not have a TASTY attribute")
+                }
+              case info =>
+                cannotUnpickle(s"its info of type ${info.getClass} is not a ClassfileLoader")
+            }
+          case _ =>
+            ctx.error(s"class not found: $className")
+            unit
+        }
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/Main.scala b/compiler/src/dotty/tools/dotc/Main.scala
new file mode 100644
index 000000000..a6844fbbc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/Main.scala
@@ -0,0 +1,9 @@
+package dotty.tools
+package dotc
+
+import core.Contexts.Context
+
+/** Main class of the `dotc` batch compiler. */
+object Main extends Driver {
+ override def newCompiler(implicit ctx: Context): Compiler = new Compiler
+}
diff --git a/compiler/src/dotty/tools/dotc/Resident.scala b/compiler/src/dotty/tools/dotc/Resident.scala
new file mode 100644
index 000000000..56f6684d0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/Resident.scala
@@ -0,0 +1,58 @@
+package dotty.tools
+package dotc
+
+import core.Contexts.Context
+import reporting.Reporter
+import java.io.EOFException
+import scala.annotation.tailrec
+
+/** A compiler which stays resident between runs. This is more of a PoC than
+ * something that's expected to be used often.
+ *
+ * Usage:
+ *
+ * > scala dotty.tools.dotc.Resident <options> <initial files>
+ *
+ * dotc> "more options and files to compile"
+ *
+ * ...
+ *
+ * dotc> :reset // reset all options to the ones passed on the command line
+ *
+ * ...
+ *
+ * dotc> :q // quit
+ */
+class Resident extends Driver {
+
+ object residentCompiler extends Compiler
+
+ override def newCompiler(implicit ctx: Context): Compiler = ???
+
+ override def sourcesRequired = false
+
+ private val quit = ":q"
+ private val reset = ":reset"
+ private val prompt = "dotc> "
+
+ private def getLine() = {
+ Console.print(prompt)
+ try scala.io.StdIn.readLine() catch { case _: EOFException => quit }
+ }
+
+ final override def process(args: Array[String], rootCtx: Context): Reporter = {
+ @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = {
+ var (fileNames, ctx) = setup(args, prevCtx)
+ doCompile(residentCompiler, fileNames)(ctx)
+ var nextCtx = ctx
+ var line = getLine()
+ while (line == reset) {
+ nextCtx = rootCtx
+ line = getLine()
+ }
+ if (line.startsWith(quit)) ctx.reporter
+ else loop(line split "\\s+", nextCtx)
+ }
+ loop(args, rootCtx)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala
new file mode 100644
index 000000000..0f652ff0b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/Run.scala
@@ -0,0 +1,138 @@
+package dotty.tools
+package dotc
+
+import core._
+import Contexts._
+import Periods._
+import Symbols._
+import Phases._
+import Decorators._
+import dotty.tools.dotc.transform.TreeTransforms.TreeTransformer
+import io.PlainFile
+import scala.io.Codec
+import util._
+import reporting.Reporter
+import transform.TreeChecker
+import rewrite.Rewrites
+import java.io.{BufferedWriter, OutputStreamWriter}
+
+import scala.annotation.tailrec
+import scala.reflect.io.VirtualFile
+import scala.util.control.NonFatal
+
+/** A compiler run. Exports various methods to compile source files */
+class Run(comp: Compiler)(implicit ctx: Context) {
+
+ assert(comp.phases.last.last.id <= Periods.MaxPossiblePhaseId)
+ assert(ctx.runId <= Periods.MaxPossibleRunId)
+
+ var units: List[CompilationUnit] = _
+
+ def getSource(fileName: String): SourceFile = {
+ val f = new PlainFile(fileName)
+ if (f.isDirectory) {
+ ctx.error(s"expected file, received directory '$fileName'")
+ NoSource
+ } else if (f.exists) {
+ val encoding = ctx.settings.encoding.value
+ new SourceFile(f, Codec(encoding))
+ } else {
+ ctx.error(s"not found: $fileName")
+ NoSource
+ }
+ }
+
+ def compile(fileNames: List[String]): Unit = try {
+ val sources = fileNames map getSource
+ compileSources(sources)
+ } catch {
+ case NonFatal(ex) =>
+ ctx.echo(i"exception occurred while compiling $units%, %")
+ throw ex
+ }
+
+ /** TODO: There's a fundamental design problem here: We assemble phases using `squash`
+ * when we first build the compiler. But we modify them with -Yskip, -Ystop
+ * on each run. That modification needs to either transform the tree structure,
+ * or we need to assemble phases on each run, and take -Yskip, -Ystop into
+ * account. I think the latter would be preferable.
+ */
+ def compileSources(sources: List[SourceFile]) =
+ if (sources forall (_.exists)) {
+ units = sources map (new CompilationUnit(_))
+ compileUnits()
+ }
+
+ protected def compileUnits() = Stats.monitorHeartBeat {
+ ctx.checkSingleThreaded()
+ val phases = ctx.squashPhases(ctx.phasePlan,
+ ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, ctx.settings.YstopAfter.value, ctx.settings.Ycheck.value)
+ ctx.usePhases(phases)
+ var lastPrintedTree: PrintedTree = NoPrintedTree
+ for (phase <- ctx.allPhases)
+ if (!ctx.reporter.hasErrors) {
+ val start = System.currentTimeMillis
+ units = phase.runOn(units)
+ if (ctx.settings.Xprint.value.containsPhase(phase)) {
+ for (unit <- units) {
+ lastPrintedTree =
+ printTree(lastPrintedTree)(ctx.fresh.setPhase(phase.next).setCompilationUnit(unit))
+ }
+ }
+ ctx.informTime(s"$phase ", start)
+ }
+ if (!ctx.reporter.hasErrors) Rewrites.writeBack()
+ for (unit <- units)
+ Stats.record("retained typed trees at end", unit.tpdTree.treeSize)
+ Stats.record("total trees at end", ast.Trees.ntrees)
+ }
+
+ private sealed trait PrintedTree
+ private final case class SomePrintedTree(phase: String, tree: String) extends PrintedTree
+ private object NoPrintedTree extends PrintedTree
+
+ private def printTree(last: PrintedTree)(implicit ctx: Context): PrintedTree = {
+ val unit = ctx.compilationUnit
+ val prevPhase = ctx.phase.prev // can be a mini-phase
+ val squashedPhase = ctx.squashed(prevPhase)
+ val treeString = unit.tpdTree.show
+
+ ctx.echo(s"result of $unit after $squashedPhase:")
+
+ last match {
+ case SomePrintedTree(phase, lastTreeSting) if lastTreeSting != treeString =>
+ val msg =
+ if (!ctx.settings.XprintDiff.value && !ctx.settings.XprintDiffDel.value) treeString
+ else DiffUtil.mkColoredCodeDiff(treeString, lastTreeSting, ctx.settings.XprintDiffDel.value)
+ ctx.echo(msg)
+ SomePrintedTree(squashedPhase.toString, treeString)
+
+ case SomePrintedTree(phase, lastTreeSting) =>
+ ctx.echo(" Unchanged since " + phase)
+ last
+
+ case NoPrintedTree =>
+ ctx.echo(treeString)
+ SomePrintedTree(squashedPhase.toString, treeString)
+ }
+ }
+
+ def compile(sourceCode: String): Unit = {
+ val virtualFile = new VirtualFile(sourceCode) // use source code as name as it's used for equals
+ val writer = new BufferedWriter(new OutputStreamWriter(virtualFile.output, "UTF-8")) // buffering is still advised by javadoc
+ writer.write(sourceCode)
+ writer.close()
+ compileSources(List(new SourceFile(virtualFile, Codec.UTF8)))
+ }
+
+ /** The context created for this run */
+ def runContext = ctx
+
+ /** Print summary; return # of errors encountered */
+ def printSummary(): Reporter = {
+ ctx.runInfo.printMaxConstraint()
+ val r = ctx.reporter
+ r.printSummary
+ r
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
new file mode 100644
index 000000000..255619f35
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
@@ -0,0 +1,258 @@
+package dotty.tools
+package dotc
+package ast
+
+import core._
+import util.Positions._, Types._, Contexts._, Constants._, Names._, Flags._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
+
+// TODO: revise, integrate in a checking phase.
+object CheckTrees {
+
+ import tpd._
+
+ def check(p: Boolean, msg: => String = "")(implicit ctx: Context): Unit = assert(p, msg)
+
+ def checkTypeArg(arg: Tree, bounds: TypeBounds)(implicit ctx: Context): Unit = {
+ check(arg.isValueType)
+ check(bounds contains arg.tpe)
+ }
+
+ def escapingRefs(block: Block)(implicit ctx: Context): collection.Set[NamedType] = {
+ var hoisted: Set[Symbol] = Set()
+ lazy val locals = ctx.typeAssigner.localSyms(block.stats).toSet
+ def isLocal(sym: Symbol): Boolean =
+ (locals contains sym) && !isHoistableClass(sym)
+ def isHoistableClass(sym: Symbol) =
+ sym.isClass && {
+ (hoisted contains sym) || {
+ hoisted += sym
+ !classLeaks(sym.asClass)
+ }
+ }
+ def leakingTypes(tp: Type): collection.Set[NamedType] =
+ tp namedPartsWith (tp => isLocal(tp.symbol))
+ def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty
+ def classLeaks(sym: ClassSymbol): Boolean =
+ (ctx.owner is Method) || // can't hoist classes out of method bodies
+ (sym.info.parents exists typeLeaks) ||
+ (sym.decls.toList exists (t => typeLeaks(t.info)))
+ leakingTypes(block.tpe)
+ }
+
+ def checkType(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case Ident(name) =>
+ case Select(qualifier, name) =>
+ check(qualifier.isValue)
+ check(qualifier.tpe =:= tree.tpe.normalizedPrefix)
+ val denot = qualifier.tpe.member(name)
+ check(denot.exists)
+ check(denot.hasAltWith(_.symbol == tree.symbol))
+ case This(cls) =>
+ case Super(qual, mixin) =>
+ check(qual.isValue)
+ val cls = qual.tpe.typeSymbol
+ check(cls.isClass)
+ case Apply(fn, args) =>
+ def checkArg(arg: Tree, name: Name, formal: Type): Unit = {
+ arg match {
+ case NamedArg(argName, _) =>
+ check(argName == name)
+ case _ =>
+ check(arg.isValue)
+ }
+ check(arg.tpe <:< formal)
+ }
+ val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction
+ (args, paramNames, paramTypes).zipped foreach checkArg
+ case TypeApply(fn, args) =>
+ val pt @ PolyType(_) = fn.tpe.widen // checked already at construction
+ (args, pt.instantiateBounds(args map (_.tpe))).zipped foreach checkTypeArg
+ case Literal(const: Constant) =>
+ case New(tpt) =>
+ check(tpt.isValueType)
+ val cls = tpt.tpe.typeSymbol
+ check(cls.isClass)
+ check(!(cls is AbstractOrTrait))
+ case Pair(left, right) =>
+ check(left.isValue)
+ check(right.isValue)
+ case Typed(expr, tpt) =>
+ check(tpt.isValueType)
+ expr.tpe.widen match {
+ case tp: MethodType =>
+ val cls = tpt.tpe.typeSymbol
+ check(cls.isClass)
+ check((cls is Trait) ||
+ cls.primaryConstructor.info.paramTypess.flatten.isEmpty)
+ val absMembers = tpt.tpe.abstractTermMembers
+ check(absMembers.size == 1)
+ check(tp <:< absMembers.head.info)
+ case _ =>
+ check(expr.isValueOrPattern)
+ check(expr.tpe <:< tpt.tpe.translateParameterized(defn.RepeatedParamClass, defn.SeqClass))
+ }
+ case NamedArg(name, arg) =>
+ case Assign(lhs, rhs) =>
+ check(lhs.isValue); check(rhs.isValue)
+ lhs.tpe match {
+ case ltpe: TermRef =>
+ check(ltpe.symbol is Mutable)
+ case _ =>
+ check(false)
+ }
+ check(rhs.tpe <:< lhs.tpe.widen)
+ case tree @ Block(stats, expr) =>
+ check(expr.isValue)
+ check(escapingRefs(tree).isEmpty)
+ case If(cond, thenp, elsep) =>
+ check(cond.isValue); check(thenp.isValue); check(elsep.isValue)
+ check(cond.tpe isRef defn.BooleanClass)
+ case Closure(env, meth, target) =>
+ meth.tpe.widen match {
+ case mt @ MethodType(_, paramTypes) =>
+ if (target.isEmpty) {
+ check(env.length < paramTypes.length)
+ for ((arg, formal) <- env zip paramTypes)
+ check(arg.tpe <:< formal)
+ }
+ else
+ // env is stored in class, not method
+ target.tpe match {
+ case SAMType(targetMeth) =>
+ check(mt <:< targetMeth.info)
+ }
+ }
+ case Match(selector, cases) =>
+ check(selector.isValue)
+ // are any checks that relate selector and patterns desirable?
+ case CaseDef(pat, guard, body) =>
+ check(pat.isValueOrPattern); check(guard.isValue); check(body.isValue)
+ check(guard.tpe.derivesFrom(defn.BooleanClass))
+ case Return(expr, from) =>
+ check(expr.isValue); check(from.isTerm)
+ check(from.tpe.termSymbol.isRealMethod)
+ case Try(block, handler, finalizer) =>
+ check(block.isTerm)
+ check(finalizer.isTerm)
+ check(handler.isTerm)
+ check(handler.tpe derivesFrom defn.FunctionClass(1))
+ check(handler.tpe.baseArgInfos(defn.FunctionClass(1)).head <:< defn.ThrowableType)
+ case Throw(expr) =>
+ check(expr.isValue)
+ check(expr.tpe.derivesFrom(defn.ThrowableClass))
+ case SeqLiteral(elems) =>
+ val elemtp = tree.tpe.elemType
+ for (elem <- elems) {
+ check(elem.isValue)
+ check(elem.tpe <:< elemtp)
+ }
+ case TypeTree(original) =>
+ if (!original.isEmpty) {
+ check(original.isValueType)
+ check(original.tpe == tree.tpe)
+ }
+ case SingletonTypeTree(ref) =>
+ check(ref.isValue)
+ check(ref.symbol.isStable)
+ case SelectFromTypeTree(qualifier, name) =>
+ check(qualifier.isValueType)
+ check(qualifier.tpe =:= tree.tpe.normalizedPrefix)
+ val denot = qualifier.tpe.member(name)
+ check(denot.exists)
+ check(denot.symbol == tree.symbol)
+ case AndTypeTree(left, right) =>
+ check(left.isValueType); check(right.isValueType)
+ case OrTypeTree(left, right) =>
+ check(left.isValueType); check(right.isValueType)
+ case RefinedTypeTree(tpt, refinements) =>
+ check(tpt.isValueType)
+ def checkRefinements(forbidden: Set[Symbol], rs: List[Tree]): Unit = rs match {
+ case r :: rs1 =>
+ val rsym = r.symbol
+ check(rsym.isTerm || rsym.isAbstractOrAliasType)
+ if (rsym.isAbstractType) check(tpt.tpe.member(rsym.name).exists)
+ check(rsym.info forallParts {
+ case nt: NamedType => !(forbidden contains nt.symbol)
+ case _ => true
+ })
+ checkRefinements(forbidden - rsym, rs1)
+ case nil =>
+ }
+ checkRefinements(ctx.typeAssigner.localSyms(refinements).toSet, refinements)
+ case AppliedTypeTree(tpt, args) =>
+ check(tpt.isValueType)
+ val tparams = tpt.tpe.typeParams
+ check(sameLength(tparams, args))
+ (args, tparams map (_.info.bounds)).zipped foreach checkTypeArg
+ case TypeBoundsTree(lo, hi) =>
+ check(lo.isValueType); check(hi.isValueType)
+ check(lo.tpe <:< hi.tpe)
+ case Bind(sym, body) =>
+ check(body.isValueOrPattern)
+ check(!(tree.symbol is Method))
+ body match {
+ case Ident(nme.WILDCARD) =>
+ case _ => check(body.tpe.widen =:= tree.symbol.info)
+ }
+ case Alternative(alts) =>
+ for (alt <- alts) check(alt.isValueOrPattern)
+ case UnApply(fun, implicits, args) => // todo: review
+ check(fun.isTerm)
+ for (arg <- args) check(arg.isValueOrPattern)
+ val funtpe @ MethodType(_, _) = fun.tpe.widen
+ fun.symbol.name match { // check arg arity
+ case nme.unapplySeq =>
+ // args need to be wrapped in (...: _*)
+ check(args.length == 1)
+ check(args.head.isInstanceOf[SeqLiteral])
+ case nme.unapply =>
+ val rtp = funtpe.resultType
+ if (rtp isRef defn.BooleanClass)
+ check(args.isEmpty)
+ else {
+ check(rtp isRef defn.OptionClass)
+ val normArgs = rtp.argTypesHi match {
+ case optionArg :: Nil =>
+ optionArg.argTypesHi match {
+ case Nil =>
+ optionArg :: Nil
+ case tupleArgs if defn.isTupleType(optionArg) =>
+ tupleArgs
+ }
+ case _ =>
+ check(false)
+ Nil
+ }
+ check(sameLength(normArgs, args))
+ }
+ }
+ case ValDef(mods, name, tpt, rhs) =>
+ check(!(tree.symbol is Method))
+ if (!rhs.isEmpty) {
+ check(rhs.isValue)
+ check(rhs.tpe <:< tpt.tpe)
+ }
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ check(tree.symbol is Method)
+ if (!rhs.isEmpty) {
+ check(rhs.isValue)
+ check(rhs.tpe <:< tpt.tpe)
+ }
+ case TypeDef(mods, name, tpt) =>
+ check(tpt.isInstanceOf[Template] || tpt.tpe.isInstanceOf[TypeBounds])
+ case Template(constr, parents, selfType, body) =>
+ case Import(expr, selectors) =>
+ check(expr.isValue)
+ check(expr.tpe.termSymbol.isStable)
+ case PackageDef(pid, stats) =>
+ check(pid.isTerm)
+ check(pid.symbol is Package)
+ case Annotated(annot, arg) =>
+ check(annot.isInstantiation)
+ check(annot.symbol.owner.isSubClass(defn.AnnotationClass))
+ check(arg.isValueType || arg.isValue)
+ case EmptyTree =>
+ }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
new file mode 100644
index 000000000..366a0e225
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -0,0 +1,1089 @@
+package dotty.tools
+package dotc
+package ast
+
+import core._
+import util.Positions._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
+import Decorators._
+import language.higherKinds
+import collection.mutable.ListBuffer
+import util.Property
+import reporting.diagnostic.messages._
+
+object desugar {
+ import untpd._
+
+ /** Tags a .withFilter call generated by desugaring a for expression.
+ * Such calls can alternatively be rewritten to use filter.
+ */
+ val MaybeFilter = new Property.Key[Unit]
+
+ /** Info of a variable in a pattern: The named tree and its type */
+ private type VarInfo = (NameTree, Tree)
+
+ /** Names of methods that are added unconditionally to case classes */
+ def isDesugaredCaseClassMethodName(name: Name)(implicit ctx: Context): Boolean =
+ name == nme.isDefined ||
+ name == nme.copy ||
+ name == nme.productArity ||
+ name.isSelectorName
+
+// ----- DerivedTypeTrees -----------------------------------
+
+ class SetterParamTree extends DerivedTypeTree {
+ def derivedType(sym: Symbol)(implicit ctx: Context) = sym.info.resultType
+ }
+
+ class TypeRefTree extends DerivedTypeTree {
+ def derivedType(sym: Symbol)(implicit ctx: Context) = sym.typeRef
+ }
+
+ class DerivedFromParamTree extends DerivedTypeTree {
+
+ /** Make sure that for all enclosing module classes their companion classes
+ * are completed. Reason: We need the constructor of such companion classes to
+ * be completed so that OriginalSymbol attachments are pushed to DerivedTypeTrees
+ * in apply/unapply methods.
+ */
+ override def ensureCompletions(implicit ctx: Context) =
+ if (!(ctx.owner is Package))
+ if (ctx.owner.isClass) {
+ ctx.owner.ensureCompleted()
+ if (ctx.owner is ModuleClass)
+ ctx.owner.linkedClass.ensureCompleted()
+ }
+ else ensureCompletions(ctx.outer)
+
+ /** Return info of original symbol, where all references to siblings of the
+ * original symbol (i.e. sibling and original symbol have the same owner)
+ * are rewired to same-named parameters or accessors in the scope enclosing
+ * the current scope. The current scope is the scope owned by the defined symbol
+ * itself, that's why we have to look one scope further out. If the resulting
+ * type is an alias type, dealias it. This is necessary because the
+ * accessor of a type parameter is a private type alias that cannot be accessed
+ * from subclasses.
+ */
+ def derivedType(sym: Symbol)(implicit ctx: Context) = {
+ val relocate = new TypeMap {
+ val originalOwner = sym.owner
+ def apply(tp: Type) = tp match {
+ case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) =>
+ val defctx = ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next
+ var local = defctx.denotNamed(tp.name).suchThat(_ is ParamOrAccessor).symbol
+ if (local.exists) (defctx.owner.thisType select local).dealias
+ else throw new java.lang.Error(
+ s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope}"
+ )
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ relocate(sym.info)
+ }
+ }
+
+ /** A type definition copied from `tdef` with a rhs typetree derived from it */
+ def derivedTypeParam(tdef: TypeDef) =
+ cpy.TypeDef(tdef)(
+ rhs = new DerivedFromParamTree() withPos tdef.rhs.pos watching tdef)
+
+ /** A value definition copied from `vdef` with a tpt typetree derived from it */
+ def derivedTermParam(vdef: ValDef) =
+ cpy.ValDef(vdef)(
+ tpt = new DerivedFromParamTree() withPos vdef.tpt.pos watching vdef)
+
+// ----- Desugar methods -------------------------------------------------
+
+ /** var x: Int = expr
+ * ==>
+ * def x: Int = expr
+ * def x_=($1: <TypeTree()>): Unit = ()
+ */
+ def valDef(vdef: ValDef)(implicit ctx: Context): Tree = {
+ val ValDef(name, tpt, rhs) = vdef
+ val mods = vdef.mods
+ def setterNeeded =
+ (mods is Mutable) && ctx.owner.isClass && (!(mods is PrivateLocal) || (ctx.owner is Trait))
+ if (setterNeeded) {
+ // todo: copy of vdef as getter needed?
+ // val getter = ValDef(mods, name, tpt, rhs) withPos vdef.pos ?
+ // right now vdef maps via expandedTree to a thicket which concerns itself.
+ // I don't see a problem with that but if there is one we can avoid it by making a copy here.
+ val setterParam = makeSyntheticParameter(tpt = (new SetterParamTree).watching(vdef))
+ val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral
+ val setter = cpy.DefDef(vdef)(
+ name = name.setterName,
+ tparams = Nil,
+ vparamss = (setterParam :: Nil) :: Nil,
+ tpt = TypeTree(defn.UnitType),
+ rhs = setterRhs
+ ).withMods((mods | Accessor) &~ CaseAccessor) // rhs gets filled in later, when field is generated and getter has parameters
+ Thicket(vdef, setter)
+ }
+ else vdef
+ }
+
+ /** Expand context bounds to evidence params. E.g.,
+ *
+ * def f[T >: L <: H : B](params)
+ * ==>
+ * def f[T >: L <: H](params)(implicit evidence$0: B[T])
+ *
+ * Expand default arguments to default getters. E.g.,
+ *
+ * def f[T: B](x: Int = 1)(y: String = x + "m") = ...
+ * ==>
+ * def f[T](x: Int)(y: String)(implicit evidence$0: B[T]) = ...
+ * def f$default$1[T] = 1
+ * def f$default$2[T](x: Int) = x + "m"
+ */
+ def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(implicit ctx: Context): Tree = {
+ val DefDef(name, tparams, vparamss, tpt, rhs) = meth
+ val mods = meth.mods
+ val epbuf = new ListBuffer[ValDef]
+ val tparams1 = tparams mapConserve {
+ case tparam @ TypeDef(_, ContextBounds(tbounds, cxbounds)) =>
+ for (cxbound <- cxbounds) {
+ val paramFlags: FlagSet = if (isPrimaryConstructor) PrivateLocalParamAccessor else Param
+ val epname = ctx.freshName(nme.EVIDENCE_PARAM_PREFIX).toTermName
+ epbuf += ValDef(epname, cxbound, EmptyTree).withFlags(paramFlags | Implicit)
+ }
+ cpy.TypeDef(tparam)(rhs = tbounds)
+ case tparam =>
+ tparam
+ }
+
+ val meth1 = addEvidenceParams(cpy.DefDef(meth)(tparams = tparams1), epbuf.toList)
+
+ /** The longest prefix of parameter lists in vparamss whose total length does not exceed `n` */
+ def takeUpTo(vparamss: List[List[ValDef]], n: Int): List[List[ValDef]] = vparamss match {
+ case vparams :: vparamss1 =>
+ val len = vparams.length
+ if (n >= len) vparams :: takeUpTo(vparamss1, n - len) else Nil
+ case _ =>
+ Nil
+ }
+
+ def normalizedVparamss = meth1.vparamss map (_ map (vparam =>
+ cpy.ValDef(vparam)(rhs = EmptyTree)))
+
+ def dropContextBound(tparam: TypeDef) = tparam.rhs match {
+ case ContextBounds(tbounds, _) => cpy.TypeDef(tparam)(rhs = tbounds)
+ case _ => tparam
+ }
+
+ def defaultGetters(vparamss: List[List[ValDef]], n: Int): List[DefDef] = vparamss match {
+ case (vparam :: vparams) :: vparamss1 =>
+ def defaultGetter: DefDef =
+ DefDef(
+ name = meth.name.defaultGetterName(n),
+ tparams = meth.tparams.map(tparam => dropContextBound(toDefParam(tparam))),
+ vparamss = takeUpTo(normalizedVparamss, n),
+ tpt = TypeTree(),
+ rhs = vparam.rhs
+ ).withMods(Modifiers(mods.flags & AccessFlags, mods.privateWithin))
+ val rest = defaultGetters(vparams :: vparamss1, n + 1)
+ if (vparam.rhs.isEmpty) rest else defaultGetter :: rest
+ case Nil :: vparamss1 =>
+ defaultGetters(vparamss1, n)
+ case nil =>
+ Nil
+ }
+
+ val defGetters = defaultGetters(vparamss, 0)
+ if (defGetters.isEmpty) meth1
+ else {
+ val meth2 = cpy.DefDef(meth1)(vparamss = normalizedVparamss)
+ .withMods(meth1.mods | DefaultParameterized)
+ Thicket(meth2 :: defGetters)
+ }
+ }
+
+ /** Add all evidence parameters in `params` as implicit parameters to `meth`. */
+ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(implicit ctx: Context): DefDef =
+ params match {
+ case Nil =>
+ meth
+ case evidenceParams =>
+ val vparamss1 = meth.vparamss.reverse match {
+ case (vparams @ (vparam :: _)) :: rvparamss if vparam.mods is Implicit =>
+ ((vparams ++ evidenceParams) :: rvparamss).reverse
+ case _ =>
+ meth.vparamss :+ evidenceParams
+ }
+ cpy.DefDef(meth)(vparamss = vparamss1)
+ }
+
+ /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */
+ private def evidenceParams(meth: DefDef)(implicit ctx: Context): List[ValDef] =
+ meth.vparamss.reverse match {
+ case (vparams @ (vparam :: _)) :: _ if vparam.mods is Implicit =>
+ vparams.dropWhile(!_.name.startsWith(nme.EVIDENCE_PARAM_PREFIX))
+ case _ =>
+ Nil
+ }
+
+ /** Fill in empty type bounds with Nothing/Any. Expand private local type parameters as follows:
+ *
+ * class C[v T]
+ * ==>
+ * class C { type v C$T; type v T = C$T }
+ */
+ def typeDef(tdef: TypeDef)(implicit ctx: Context): Tree = {
+ if (tdef.mods is PrivateLocalParam) {
+ val tparam = cpy.TypeDef(tdef)(name = tdef.name.expandedName(ctx.owner))
+ .withMods(tdef.mods &~ PrivateLocal | ExpandedName)
+ val alias = cpy.TypeDef(tdef)(rhs = refOfDef(tparam))
+ .withMods(tdef.mods & VarianceFlags | PrivateLocalParamAccessor | Synthetic)
+ Thicket(tparam, alias)
+ }
+ else tdef
+ }
+
+ @sharable private val synthetic = Modifiers(Synthetic)
+
+ private def toDefParam(tparam: TypeDef): TypeDef =
+ tparam.withMods(tparam.rawMods & EmptyFlags | Param)
+ private def toDefParam(vparam: ValDef): ValDef =
+ vparam.withMods(vparam.rawMods & Implicit | Param)
+
+ /** The expansion of a class definition. See inline comments for what is involved */
+ def classDef(cdef: TypeDef)(implicit ctx: Context): Tree = {
+ val className = checkNotReservedName(cdef).asTypeName
+ val impl @ Template(constr0, parents, self, _) = cdef.rhs
+ val mods = cdef.mods
+ val companionMods = mods.withFlags((mods.flags & AccessFlags).toCommonFlags)
+
+ val (constr1, defaultGetters) = defDef(constr0, isPrimaryConstructor = true) match {
+ case meth: DefDef => (meth, Nil)
+ case Thicket((meth: DefDef) :: defaults) => (meth, defaults)
+ }
+
+ // The original type and value parameters in the constructor already have the flags
+ // needed to be type members (i.e. param, and possibly also private and local unless
+ // prefixed by type or val). `tparams` and `vparamss` are the type parameters that
+ // go in `constr`, the constructor after desugaring.
+
+ /** Does `tree` look like a reference to AnyVal? Temporary test before we have inline classes */
+ def isAnyVal(tree: Tree): Boolean = tree match {
+ case Ident(tpnme.AnyVal) => true
+ case Select(qual, tpnme.AnyVal) => isScala(qual)
+ case _ => false
+ }
+ def isScala(tree: Tree): Boolean = tree match {
+ case Ident(nme.scala_) => true
+ case Select(Ident(nme.ROOTPKG), nme.scala_) => true
+ case _ => false
+ }
+
+ val isCaseClass = mods.is(Case) && !mods.is(Module)
+ val isValueClass = parents.nonEmpty && isAnyVal(parents.head)
+ // This is not watertight, but `extends AnyVal` will be replaced by `inline` later.
+
+ val constrTparams = constr1.tparams map toDefParam
+ val constrVparamss =
+ if (constr1.vparamss.isEmpty) { // ensure parameter list is non-empty
+ if (isCaseClass)
+ ctx.error(CaseClassMissingParamList(cdef), cdef.namePos)
+ ListOfNil
+ }
+ else constr1.vparamss.nestedMap(toDefParam)
+ val constr = cpy.DefDef(constr1)(tparams = constrTparams, vparamss = constrVparamss)
+
+ // Add constructor type parameters and evidence implicit parameters
+ // to auxiliary constructors
+ val normalizedBody = impl.body map {
+ case ddef: DefDef if ddef.name.isConstructorName =>
+ addEvidenceParams(
+ cpy.DefDef(ddef)(tparams = constrTparams),
+ evidenceParams(constr1).map(toDefParam))
+ case stat =>
+ stat
+ }
+
+ val derivedTparams = constrTparams map derivedTypeParam
+ val derivedVparamss = constrVparamss nestedMap derivedTermParam
+ val arity = constrVparamss.head.length
+
+ var classTycon: Tree = EmptyTree
+
+ // a reference to the class type, with all parameters given.
+ val classTypeRef/*: Tree*/ = {
+ // -language:keepUnions difference: classTypeRef needs type annotation, otherwise
+ // infers Ident | AppliedTypeTree, which
+ // renders the :\ in companions below untypable.
+ classTycon = (new TypeRefTree) withPos cdef.pos.startPos // watching is set at end of method
+ val tparams = impl.constr.tparams
+ if (tparams.isEmpty) classTycon else AppliedTypeTree(classTycon, tparams map refOfDef)
+ }
+
+ // new C[Ts](paramss)
+ lazy val creatorExpr = New(classTypeRef, constrVparamss nestedMap refOfDef)
+
+ // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams)
+ // def isDefined = true
+ // def productArity = N
+ // def _1 = this.p1
+ // ...
+ // def _N = this.pN
+ // def copy(p1: T1 = p1: @uncheckedVariance, ...,
+ // pN: TN = pN: @uncheckedVariance)(moreParams) =
+ // new C[...](p1, ..., pN)(moreParams)
+ //
+ // Note: copy default parameters need @uncheckedVariance; see
+ // neg/t1843-variances.scala for a test case. The test would give
+ // two errors without @uncheckedVariance, one of them spurious.
+ val caseClassMeths =
+ if (isCaseClass) {
+ def syntheticProperty(name: TermName, rhs: Tree) =
+ DefDef(name, Nil, Nil, TypeTree(), rhs).withMods(synthetic)
+ val isDefinedMeth = syntheticProperty(nme.isDefined, Literal(Constant(true)))
+ val caseParams = constrVparamss.head.toArray
+ val productElemMeths = for (i <- 0 until arity) yield
+ syntheticProperty(nme.selectorName(i), Select(This(EmptyTypeIdent), caseParams(i).name))
+ def isRepeated(tree: Tree): Boolean = tree match {
+ case PostfixOp(_, nme.raw.STAR) => true
+ case ByNameTypeTree(tree1) => isRepeated(tree1)
+ case _ => false
+ }
+ val hasRepeatedParam = constrVparamss.exists(_.exists {
+ case ValDef(_, tpt, _) => isRepeated(tpt)
+ case _ => false
+ })
+
+ val copyMeths =
+ if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued
+ else {
+ def copyDefault(vparam: ValDef) =
+ makeAnnotated(defn.UncheckedVarianceAnnot, refOfDef(vparam))
+ val copyFirstParams = derivedVparamss.head.map(vparam =>
+ cpy.ValDef(vparam)(rhs = copyDefault(vparam)))
+ val copyRestParamss = derivedVparamss.tail.nestedMap(vparam =>
+ cpy.ValDef(vparam)(rhs = EmptyTree))
+ DefDef(nme.copy, derivedTparams, copyFirstParams :: copyRestParamss, TypeTree(), creatorExpr)
+ .withMods(synthetic) :: Nil
+ }
+ copyMeths ::: isDefinedMeth :: productElemMeths.toList
+ }
+ else Nil
+
+ def anyRef = ref(defn.AnyRefAlias.typeRef)
+ def productConstr(n: Int) = {
+ val tycon = scalaDot((tpnme.Product.toString + n).toTypeName)
+ val targs = constrVparamss.head map (_.tpt)
+ if (targs.isEmpty) tycon else AppliedTypeTree(tycon, targs)
+ }
+
+ // Case classes and case objects get a ProductN parent
+ var parents1 = parents
+ if (mods.is(Case) && arity <= Definitions.MaxTupleArity)
+ parents1 = parents1 :+ productConstr(arity)
+
+ // The thicket which is the desugared version of the companion object
+ // synthetic object C extends parentTpt { defs }
+ def companionDefs(parentTpt: Tree, defs: List[Tree]) =
+ moduleDef(
+ ModuleDef(
+ className.toTermName, Template(emptyConstructor, parentTpt :: Nil, EmptyValDef, defs))
+ .withMods(companionMods | Synthetic))
+ .withPos(cdef.pos).toList
+
+ // The companion object definitions, if a companion is needed, Nil otherwise.
+ // companion definitions include:
+ // 1. If class is a case class case class C[Ts](p1: T1, ..., pN: TN)(moreParams):
+ // def apply[Ts](p1: T1, ..., pN: TN)(moreParams) = new C[Ts](p1, ..., pN)(moreParams) (unless C is abstract)
+ // def unapply[Ts]($1: C[Ts]) = $1
+ // 2. The default getters of the constructor
+ // The parent of the companion object of a non-parameterized case class
+ // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C
+ // For all other classes, the parent is AnyRef.
+ val companions =
+ if (isCaseClass) {
+ val parent =
+ if (constrTparams.nonEmpty ||
+ constrVparamss.length > 1 ||
+ mods.is(Abstract) ||
+ constr.mods.is(Private)) anyRef
+ // todo: also use anyRef if constructor has a dependent method type (or rule that out)!
+ else (constrVparamss :\ classTypeRef) ((vparams, restpe) => Function(vparams map (_.tpt), restpe))
+ val applyMeths =
+ if (mods is Abstract) Nil
+ else
+ DefDef(nme.apply, derivedTparams, derivedVparamss, TypeTree(), creatorExpr)
+ .withFlags(Synthetic | (constr1.mods.flags & DefaultParameterized)) :: Nil
+ val unapplyMeth = {
+ val unapplyParam = makeSyntheticParameter(tpt = classTypeRef)
+ val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name)
+ DefDef(nme.unapply, derivedTparams, (unapplyParam :: Nil) :: Nil, TypeTree(), unapplyRHS)
+ .withMods(synthetic)
+ }
+ companionDefs(parent, applyMeths ::: unapplyMeth :: defaultGetters)
+ }
+ else if (defaultGetters.nonEmpty)
+ companionDefs(anyRef, defaultGetters)
+ else if (isValueClass)
+ companionDefs(anyRef, Nil)
+ else Nil
+
+
+ // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, .., pMN: TMN), the method
+ // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] =
+ // new C[Ts](p11, ..., p1N) ... (pM1, ..., pMN) =
+ val implicitWrappers =
+ if (!mods.is(Implicit))
+ Nil
+ else if (ctx.owner is Package) {
+ ctx.error(TopLevelImplicitClass(cdef), cdef.pos)
+ Nil
+ }
+ else if (isCaseClass) {
+ ctx.error(ImplicitCaseClass(cdef), cdef.pos)
+ Nil
+ }
+ else
+ // implicit wrapper is typechecked in same scope as constructor, so
+ // we can reuse the constructor parameters; no derived params are needed.
+ DefDef(className.toTermName, constrTparams, constrVparamss, classTypeRef, creatorExpr)
+ .withMods(companionMods | Synthetic | Implicit)
+ .withPos(cdef.pos) :: Nil
+
+ val self1 = {
+ val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt
+ if (self.isEmpty) self
+ else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName)
+ }
+
+ val cdef1 = {
+ val originalTparams = constr1.tparams.toIterator
+ val originalVparams = constr1.vparamss.toIterator.flatten
+ val tparamAccessors = derivedTparams.map(_.withMods(originalTparams.next.mods))
+ val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags
+ val vparamAccessors = derivedVparamss.flatten.map(_.withMods(originalVparams.next.mods | caseAccessor))
+ cpy.TypeDef(cdef)(
+ name = className,
+ rhs = cpy.Template(impl)(constr, parents1, self1,
+ tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths))
+ }
+
+ // install the watch on classTycon
+ classTycon match {
+ case tycon: DerivedTypeTree => tycon.watching(cdef1)
+ case _ =>
+ }
+
+ flatTree(cdef1 :: companions ::: implicitWrappers)
+ }
+
+ val AccessOrSynthetic = AccessFlags | Synthetic
+
  /** Expand
   *
   *    object name extends parents { self => body }
   *
   *  to:
   *    <module> val name: name$ = New(name$)
   *    <module> final class name$ extends parents { self: name.type => body }
   */
  def moduleDef(mdef: ModuleDef)(implicit ctx: Context): Tree = {
    val moduleName = checkNotReservedName(mdef).asTermName
    val tmpl = mdef.impl
    val mods = mdef.mods
    if (mods is Package)
      // `package object name` is rewritten to a PackageDef containing the
      // module renamed to `package`, with the Package flag dropped.
      PackageDef(Ident(moduleName), cpy.ModuleDef(mdef)(nme.PACKAGE, tmpl).withMods(mods &~ Package) :: Nil)
    else {
      val clsName = moduleName.moduleClassName
      val clsRef = Ident(clsName)
      // The module value: `val name: name$ = New(name$)`
      val modul = ValDef(moduleName, clsRef, New(clsRef, Nil))
        .withMods(mods | ModuleCreationFlags | mods.flags & AccessFlags)
        .withPos(mdef.pos)
      val ValDef(selfName, selfTpt, _) = tmpl.self
      val selfMods = tmpl.self.mods
      // Objects may name their self reference but may not give it a type.
      if (!selfTpt.isEmpty) ctx.error(ObjectMayNotHaveSelfType(mdef), tmpl.self.pos)
      // The module class gets the singleton type `name.type` as its self type.
      val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(moduleName)), tmpl.self.rhs)
        .withMods(selfMods)
        .withPos(tmpl.self.pos orElse tmpl.pos.startPos)
      val clsTmpl = cpy.Template(tmpl)(self = clsSelf, body = tmpl.body)
      val cls = TypeDef(clsName, clsTmpl)
        .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags)
      // The module class is desugared further as a regular class definition.
      Thicket(modul, classDef(cls).withPos(mdef.pos))
    }
  }
+
+ /** The name of `mdef`, after checking that it does not redefine a Scala core class.
+ * If it does redefine, issue an error and return a mangled name instead of the original one.
+ */
+ def checkNotReservedName(mdef: MemberDef)(implicit ctx: Context): Name = {
+ val name = mdef.name
+ if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) {
+ def kind = if (name.isTypeName) "class" else "object"
+ ctx.error(em"illegal redefinition of standard $kind $name", mdef.pos)
+ name.errorName
+ }
+ else name
+ }
+
+ /** val p1, ..., pN: T = E
+ * ==>
+ * makePatDef[[val p1: T1 = E]]; ...; makePatDef[[val pN: TN = E]]
+ */
+ def patDef(pdef: PatDef)(implicit ctx: Context): Tree = {
+ val PatDef(mods, pats, tpt, rhs) = pdef
+ val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
+ flatTree(pats1 map (makePatDef(pdef, mods, _, rhs)))
+ }
+
  /** If `pat` is a variable pattern,
   *
   *    val/var/lazy val p = e
   *
   *  Otherwise, in case there is exactly one variable x_1 in pattern
   *   val/var/lazy val p = e  ==>  val/var/lazy val x_1 = (e: @unchecked) match (case p => (x_1))
   *
   *   in case there are zero or more than one variables in pattern
   *   val/var/lazy val p = e  ==>  private synthetic [lazy] val t$ = (e: @unchecked) match (case p => (x_1, ..., x_N))
   *                   val/var/def x_1 = t$._1
   *                   ...
   *                   val/var/def x_N = t$._N
   *  If the original pattern variable carries a type annotation, so does the corresponding
   *  ValDef or DefDef.
   */
  def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(implicit ctx: Context): Tree = pat match {
    case VarPattern(named, tpt) =>
      derivedValDef(original, named, tpt, rhs, mods)
    case _ =>
      val rhsUnchecked = makeAnnotated(defn.UncheckedAnnot, rhs)
      val vars = getVariables(pat)
      // True if the tree is a tuple of the same arity as the variable list;
      // in that case the rhs can be used directly without a match.
      val isMatchingTuple: Tree => Boolean = {
        case Tuple(es) => es.length == vars.length
        case _ => false
      }
      val ids = for ((named, _) <- vars) yield Ident(named.name)
      val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids))
      val matchExpr =
        if (forallResults(rhs, isMatchingTuple)) rhs
        else Match(rhsUnchecked, caseDef :: Nil)
      vars match {
        case Nil =>
          // no variables: keep only the (possibly side-effecting) match
          matchExpr
        case (named, tpt) :: Nil =>
          derivedValDef(original, named, tpt, matchExpr, mods)
        case _ =>
          // several variables: bind the match result to a synthetic temporary
          // and project each variable out of it with _1 ... _N selectors
          val tmpName = ctx.freshName().toTermName
          val patMods = mods & (AccessFlags | Lazy) | Synthetic
          val firstDef =
            ValDef(tmpName, TypeTree(), matchExpr)
              .withPos(pat.pos.union(rhs.pos)).withMods(patMods)
          def selector(n: Int) = Select(Ident(tmpName), nme.selectorName(n))
          val restDefs =
            for (((named, tpt), n) <- vars.zipWithIndex)
              yield
                // lazy patterns generate defs so each access forces the temp
                if (mods is Lazy) derivedDefDef(original, named, tpt, selector(n), mods &~ Lazy)
                else derivedValDef(original, named, tpt, selector(n), mods)
          flatTree(firstDef :: restDefs)
      }
  }
+
+ /** Expand variable identifier x to x @ _ */
+ def patternVar(tree: Tree)(implicit ctx: Context) = {
+ val Ident(name) = tree
+ Bind(name, Ident(nme.WILDCARD)).withPos(tree.pos)
+ }
+
  /** Desugar a definition tree by dispatching on its shape.
   *  Callers must pass one of the listed definition forms; any other tree
   *  shape results in a MatchError.
   */
  def defTree(tree: Tree)(implicit ctx: Context): Tree = tree match {
    case tree: ValDef => valDef(tree)
    case tree: TypeDef => if (tree.isClassDef) classDef(tree) else typeDef(tree)
    case tree: DefDef => defDef(tree)
    case tree: ModuleDef => moduleDef(tree)
    case tree: PatDef => patDef(tree)
  }
+
+ /** { stats; <empty > }
+ * ==>
+ * { stats; () }
+ */
+ def block(tree: Block)(implicit ctx: Context): Block = tree.expr match {
+ case EmptyTree =>
+ cpy.Block(tree)(tree.stats,
+ unitLiteral withPos (if (tree.stats.isEmpty) tree.pos else tree.pos.endPos))
+ case _ =>
+ tree
+ }
+
+ /** EmptyTree in lower bound ==> Nothing
+ * EmptyTree in upper bounds ==> Any
+ */
+ def typeBoundsTree(tree: TypeBoundsTree)(implicit ctx: Context): TypeBoundsTree = {
+ val TypeBoundsTree(lo, hi) = tree
+ val lo1 = if (lo.isEmpty) untpd.TypeTree(defn.NothingType) else lo
+ val hi1 = if (hi.isEmpty) untpd.TypeTree(defn.AnyType) else hi
+ cpy.TypeBoundsTree(tree)(lo1, hi1)
+ }
+
+ /** Make closure corresponding to function.
+ * params => body
+ * ==>
+ * def $anonfun(params) = body
+ * Closure($anonfun)
+ *
+ * If `inlineable` is true, tag $anonfun with an @inline annotation.
+ */
+ def makeClosure(params: List[ValDef], body: Tree, tpt: Tree = TypeTree(), inlineable: Boolean)(implicit ctx: Context) = {
+ var mods = synthetic
+ if (inlineable) mods |= Inline
+ Block(
+ DefDef(nme.ANON_FUN, Nil, params :: Nil, tpt, body).withMods(mods),
+ Closure(Nil, Ident(nme.ANON_FUN), EmptyTree))
+ }
+
+ /** If `nparams` == 1, expand partial function
+ *
+ * { cases }
+ * ==>
+ * x$1 => (x$1 @unchecked) match { cases }
+ *
+ * If `nparams` != 1, expand instead to
+ *
+ * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked) match { cases }
+ */
+ def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1, unchecked: Boolean = true)(implicit ctx: Context) = {
+ val params = (1 to nparams).toList.map(makeSyntheticParameter(_))
+ val selector = makeTuple(params.map(p => Ident(p.name)))
+
+ if (unchecked)
+ Function(params, Match(Annotated(selector, New(ref(defn.UncheckedAnnotType))), cases))
+ else
+ Function(params, Match(selector, cases))
+ }
+
+ /** Map n-ary function `(p1, ..., pn) => body` where n != 1 to unary function as follows:
+ *
+ * x$1 => {
+ * def p1 = x$1._1
+ * ...
+ * def pn = x$1._n
+ * body
+ * }
+ */
+ def makeTupledFunction(params: List[ValDef], body: Tree)(implicit ctx: Context): Tree = {
+ val param = makeSyntheticParameter()
+ def selector(n: Int) = Select(refOfDef(param), nme.selectorName(n))
+ val vdefs =
+ params.zipWithIndex.map{
+ case (param, idx) =>
+ DefDef(param.name, Nil, Nil, TypeTree(), selector(idx)).withPos(param.pos)
+ }
+ Function(param :: Nil, Block(vdefs, body))
+ }
+
+ /** Add annotation with class `cls` to tree:
+ * tree @cls
+ */
+ def makeAnnotated(cls: Symbol, tree: Tree)(implicit ctx: Context) =
+ Annotated(tree, untpd.New(untpd.TypeTree(cls.typeRef), Nil))
+
+ private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit ctx: Context) = {
+ val vdef = ValDef(named.name.asTermName, tpt, rhs)
+ .withMods(mods)
+ .withPos(original.pos.withPoint(named.pos.start))
+ val mayNeedSetter = valDef(vdef)
+ mayNeedSetter
+ }
+
+ private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers) =
+ DefDef(named.name.asTermName, Nil, Nil, tpt, rhs)
+ .withMods(mods)
+ .withPos(original.pos.withPoint(named.pos.start))
+
  /** Main desugaring method: rewrites surface-syntax trees (interpolators,
   *  infix/postfix/prefix ops, tuples, while/do loops, for comprehensions,
   *  pattern definitions, try) into their core-language equivalents.
   */
  def apply(tree: Tree)(implicit ctx: Context): Tree = {

    /** { label def lname(): Unit = rhs; call }
     */
    def labelDefAndCall(lname: TermName, rhs: Tree, call: Tree) = {
      val ldef = DefDef(lname, Nil, ListOfNil, TypeTree(defn.UnitType), rhs).withFlags(Label)
      Block(ldef, call)
    }

    /** Translate infix operation expression left op right
     */
    def makeBinop(left: Tree, op: Name, right: Tree): Tree = {
      def assignToNamedArg(arg: Tree) = arg match {
        case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs)
        case _ => arg
      }
      if (isLeftAssoc(op)) {
        val args: List[Tree] = right match {
          case Parens(arg) => assignToNamedArg(arg) :: Nil
          case Tuple(args) => args mapConserve assignToNamedArg
          case _ => right :: Nil
        }
        Apply(Select(left, op), args)
      } else {
        // right-associative op: bind `left` to a synthetic val first so it is
        // still evaluated before `right`
        val x = ctx.freshName().toTermName
        new InfixOpBlock(
          ValDef(x, TypeTree(), left).withMods(synthetic),
          Apply(Select(right, op), Ident(x)))
      }
    }

    /** Create tree for for-comprehension `<for (enums) do body>` or
     *  `<for (enums) yield body>` where mapName and flatMapName are chosen
     *  corresponding to whether this is a for-do or a for-yield.
     *  The creation performs the following rewrite rules:
     *
     *  1.
     *
     *    for (P <- G) E   ==>   G.foreach (P => E)
     *
     *  Here and in the following (P => E) is interpreted as the function (P => E)
     *  if P is a variable pattern and as the partial function { case P => E } otherwise.
     *
     *  2.
     *
     *    for (P <- G) yield E  ==>  G.map (P => E)
     *
     *  3.
     *
     *    for (P_1 <- G_1; P_2 <- G_2; ...) ...
     *      ==>
     *    G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
     *
     *  4.
     *
     *    for (P <- G; E; ...) ...
     *      =>
     *    for (P <- G.filter (P => E); ...) ...
     *
     *  5. For any N:
     *
     *    for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
     *      ==>
     *    for (TupleN(P_1, P_2, ... P_N) <-
     *      for (x_1 @ P_1 <- G) yield {
     *        val x_2 @ P_2 = E_2
     *        ...
     *        val x_N @ P_N = E_N
     *        TupleN(x_1, ..., x_N)
     *      } ...)
     *
     *    If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated
     *    and the variable constituting P_i is used instead of x_i
     *
     *  @param mapName      The name to be used for maps (either map or foreach)
     *  @param flatMapName  The name to be used for flatMaps (either flatMap or foreach)
     *  @param enums        The enumerators in the for expression
     *  @param body         The body of the for expression
     */
    def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = ctx.traceIndented(i"make for ${ForYield(enums, body)}", show = true) {

      /** Make a function value pat => body.
       *  If pat is a var pattern id: T then this gives (id: T) => body
       *  Otherwise this gives { case pat => body }
       */
      def makeLambda(pat: Tree, body: Tree): Tree = pat match {
        case VarPattern(named, tpt) =>
          Function(derivedValDef(pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body)
        case _ =>
          makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil, unchecked = false)
      }

      /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap
       *  it in a Bind with a fresh name. Return the transformed pattern, and the identifier
       *  that refers to the bound variable for the pattern.
       */
      def makeIdPat(pat: Tree): (Tree, Ident) = pat match {
        case Bind(name, _) => (pat, Ident(name))
        case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => (id, id)
        case Typed(id: Ident, _) if isVarPattern(id) && id.name != nme.WILDCARD => (pat, id)
        case _ =>
          val name = ctx.freshName().toTermName
          (Bind(name, pat), Ident(name))
      }

      /** Add MaybeFilter attachment */
      def orFilter(tree: Tree): tree.type = {
        tree.putAttachment(MaybeFilter, ())
        tree
      }

      /** Make a pattern filter:
       *    rhs.withFilter { case pat => true case _ => false }
       *
       *  On handling irrefutable patterns:
       *  The idea is to wait until the pattern matcher sees a call
       *
       *      xs withFilter { cases }
       *
       *  where cases can be proven to be refutable i.e. cases would be
       *  equivalent to  { case _ => true }
       *
       *  In that case, compile to
       *
       *      xs withFilter alwaysTrue
       *
       *  where `alwaysTrue` is a predefined function value:
       *
       *      val alwaysTrue: Any => Boolean = true
       *
       *  In the libraries operations can take advantage of alwaysTrue to shortcircuit the
       *  withFilter call.
       *
       *      def withFilter(f: Elem => Boolean) =
       *        if (f eq alwaysTrue) this // or rather identity filter monadic applied to this
       *        else real withFilter
       */
      def makePatFilter(rhs: Tree, pat: Tree): Tree = {
        val cases = List(
          CaseDef(pat, EmptyTree, Literal(Constant(true))),
          CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))))
        Apply(orFilter(Select(rhs, nme.withFilter)), makeCaseLambda(cases))
      }

      /** Is pattern `pat` irrefutable when matched against `rhs`?
       *  We only can do a simple syntactic check here; a more refined check
       *  is done later in the pattern matcher (see discussion in @makePatFilter).
       */
      def isIrrefutable(pat: Tree, rhs: Tree): Boolean = {
        def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match {
          case Tuple(trees) => (pats corresponds trees)(isIrrefutable)
          case Parens(rhs1) => matchesTuple(pats, rhs1)
          case Block(_, rhs1) => matchesTuple(pats, rhs1)
          case If(_, thenp, elsep) => matchesTuple(pats, thenp) && matchesTuple(pats, elsep)
          case Match(_, cases) => cases forall (matchesTuple(pats, _))
          case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1)
          case Throw(_) => true
          case _ => false
        }
        pat match {
          case Bind(_, pat1) => isIrrefutable(pat1, rhs)
          case Parens(pat1) => isIrrefutable(pat1, rhs)
          case Tuple(pats) => matchesTuple(pats, rhs)
          case _ => isVarPattern(pat)
        }
      }

      def isIrrefutableGenFrom(gen: GenFrom): Boolean =
        gen.isInstanceOf[IrrefutableGenFrom] || isIrrefutable(gen.pat, gen.expr)

      /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when
       *  matched against `rhs`.
       */
      def rhsSelect(gen: GenFrom, name: TermName) = {
        val rhs = if (isIrrefutableGenFrom(gen)) gen.expr else makePatFilter(gen.expr, gen.pat)
        Select(rhs, name)
      }

      enums match {
        case (gen: GenFrom) :: Nil =>
          // rules 1 and 2
          Apply(rhsSelect(gen, mapName), makeLambda(gen.pat, body))
        case (gen: GenFrom) :: (rest @ (GenFrom(_, _) :: _)) =>
          // rule 3
          val cont = makeFor(mapName, flatMapName, rest, body)
          Apply(rhsSelect(gen, flatMapName), makeLambda(gen.pat, cont))
        case (enum @ GenFrom(pat, rhs)) :: (rest @ GenAlias(_, _) :: _) =>
          // rule 5: absorb the run of aliases following the generator into it
          val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias])
          val pats = valeqs map { case GenAlias(pat, _) => pat }
          val rhss = valeqs map { case GenAlias(_, rhs) => rhs }
          val (defpat0, id0) = makeIdPat(pat)
          val (defpats, ids) = (pats map makeIdPat).unzip
          val pdefs = (valeqs, defpats, rhss).zipped.map(makePatDef(_, Modifiers(), _, _))
          val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, rhs) :: Nil, Block(pdefs, makeTuple(id0 :: ids)))
          val allpats = pat :: pats
          val vfrom1 = new IrrefutableGenFrom(makeTuple(allpats), rhs1)
          makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
        case (gen: GenFrom) :: test :: rest =>
          // rule 4
          val filtered = Apply(orFilter(rhsSelect(gen, nme.withFilter)), makeLambda(gen.pat, test))
          val genFrom =
            if (isIrrefutableGenFrom(gen)) new IrrefutableGenFrom(gen.pat, filtered)
            else GenFrom(gen.pat, filtered)
          makeFor(mapName, flatMapName, genFrom :: rest, body)
        case _ =>
          EmptyTree //may happen for erroneous input
      }
    }

    // begin desugar
    tree match {
      case SymbolLit(str) =>
        // 'str ==> Symbol("str")
        Apply(
          ref(defn.SymbolClass.companionModule.termRef),
          Literal(Constant(str)) :: Nil)
      case InterpolatedString(id, segments) =>
        // id"a$x b" ==> StringContext("a", " b").id(x)
        val strs = segments map {
          case ts: Thicket => ts.trees.head
          case t => t
        }
        val elems = segments flatMap {
          case ts: Thicket => ts.trees.tail
          case t => Nil
        }
        Apply(Select(Apply(Ident(nme.StringContext), strs), id), elems)
      case InfixOp(l, op, r) =>
        if (ctx.mode is Mode.Type)
          if (op == tpnme.raw.AMP) AndTypeTree(l, r)     // l & r
          else if (op == tpnme.raw.BAR) OrTypeTree(l, r) // l | r
          else AppliedTypeTree(Ident(op), l :: r :: Nil) // op[l, r]
        else if (ctx.mode is Mode.Pattern)
          Apply(Ident(op), l :: r :: Nil) // op(l, r)
        else // l.op(r), or val x = r; l.op(x), plus handle named args specially
          makeBinop(l, op, r)
      case PostfixOp(t, op) =>
        if ((ctx.mode is Mode.Type) && op == nme.raw.STAR) {
          // `t*` in a type position becomes a repeated-parameter annotation
          val seqType = if (ctx.compilationUnit.isJava) defn.ArrayType else defn.SeqType
          Annotated(
            AppliedTypeTree(ref(seqType), t),
            New(ref(defn.RepeatedAnnotType), Nil :: Nil))
        } else {
          assert(ctx.mode.isExpr || ctx.reporter.hasErrors, ctx.mode)
          Select(t, op)
        }
      case PrefixOp(op, t) =>
        Select(t, nme.UNARY_PREFIX ++ op)
      case Parens(t) =>
        t
      case Tuple(ts) =>
        val arity = ts.length
        def tupleTypeRef = defn.TupleType(arity)
        if (arity > Definitions.MaxTupleArity) {
          ctx.error(TupleTooLong(ts), tree.pos)
          unitLiteral
        } else if (arity == 1) ts.head
        else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
        else if (arity == 0) unitLiteral
        else Apply(ref(tupleTypeRef.classSymbol.companionModule.valRef), ts)
      case WhileDo(cond, body) =>
        // { <label> def while$(): Unit = if (cond) { body; while$() } ; while$() }
        val call = Apply(Ident(nme.WHILE_PREFIX), Nil)
        val rhs = If(cond, Block(body, call), unitLiteral)
        labelDefAndCall(nme.WHILE_PREFIX, rhs, call)
      case DoWhile(body, cond) =>
        // { label def doWhile$(): Unit = { body; if (cond) doWhile$() } ; doWhile$() }
        val call = Apply(Ident(nme.DO_WHILE_PREFIX), Nil)
        val rhs = Block(body, If(cond, call, unitLiteral))
        labelDefAndCall(nme.DO_WHILE_PREFIX, rhs, call)
      case ForDo(enums, body) =>
        makeFor(nme.foreach, nme.foreach, enums, body) orElse tree
      case ForYield(enums, body) =>
        makeFor(nme.map, nme.flatMap, enums, body) orElse tree
      case PatDef(mods, pats, tpt, rhs) =>
        val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
        flatTree(pats1 map (makePatDef(tree, mods, _, rhs)))
      case ParsedTry(body, handler, finalizer) =>
        handler match {
          case Match(EmptyTree, cases) => Try(body, cases, finalizer)
          case EmptyTree => Try(body, Nil, finalizer)
          case _ =>
            // expression handler: apply it to the caught exception
            Try(body,
              List(CaseDef(Ident(nme.DEFAULT_EXCEPTION_NAME), EmptyTree, Apply(handler, Ident(nme.DEFAULT_EXCEPTION_NAME)))),
              finalizer)
        }

    }
  }.withPos(tree.pos)
+
  /** Create a class definition with the same info as the refined type given by `parent`
   *  and `refinements`.
   *
   *      parent { refinements }
   *  ==>
   *      trait <refinement> extends core { this: self => refinements }
   *
   *  Here, `core` is the (possibly parameterized) class part of `parent`.
   *  If `parent` is the same as `core`, self is empty. Otherwise `self` is `parent`.
   *
   *  Example: Given
   *
   *      class C
   *      type T1 = C { type T <: A }
   *
   *  the refined type
   *
   *      T1 { type T <: B }
   *
   *  is expanded to
   *
   *      trait <refinement> extends C { this: T1 => type T <: A }
   *
   *  The result of this method is used for validity checking, is thrown away afterwards.
   *  @param parent  The type of `parent`
   */
  def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(implicit ctx: Context): TypeDef = {
    // The class-type constituents of `tp`; conjunctions are flattened,
    // proxies are dereferenced, and anything else falls back to Any.
    def stripToCore(tp: Type): List[Type] = tp match {
      case tp: RefinedType if tp.argInfos.nonEmpty => tp :: Nil // parameterized class type
      case tp: TypeRef if tp.symbol.isClass => tp :: Nil        // monomorphic class type
      case tp: TypeProxy => stripToCore(tp.underlying)
      case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2)
      case _ => defn.AnyType :: Nil
    }
    val parentCores = stripToCore(parent.tpe)
    val untpdParent = TypedSplice(parent)
    val (classParents, self) =
      if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef)
      else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree))
    val impl = Template(emptyConstructor, classParents, self, refinements)
    TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait)
  }
+
+ /** If tree is a variable pattern, return its name and type, otherwise return None.
+ */
+ private object VarPattern {
+ def unapply(tree: Tree)(implicit ctx: Context): Option[VarInfo] = tree match {
+ case id: Ident => Some(id, TypeTree())
+ case Typed(id: Ident, tpt) => Some((id, tpt))
+ case _ => None
+ }
+ }
+
  /** Returns list of all pattern variables, possibly with their types,
   *  without duplicates
   */
  private def getVariables(tree: Tree)(implicit ctx: Context): List[VarInfo] = {
    val buf = new ListBuffer[VarInfo]
    // First occurrence of a name wins; later bindings of the same name are dropped.
    def seenName(name: Name) = buf exists (_._1.name == name)
    def add(named: NameTree, t: Tree): Unit =
      if (!seenName(named.name)) buf += ((named, t))
    def collect(tree: Tree): Unit = tree match {
      case Bind(nme.WILDCARD, tree1) =>
        collect(tree1)
      case tree @ Bind(_, Typed(tree1, tpt)) if !mayBeTypePat(tpt) =>
        // record the ascribed type together with the variable
        add(tree, tpt)
        collect(tree1)
      case tree @ Bind(_, tree1) =>
        add(tree, TypeTree())
        collect(tree1)
      case Typed(id: Ident, t) if isVarPattern(id) && id.name != nme.WILDCARD && !isWildcardStarArg(tree) =>
        add(id, t)
      case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD =>
        add(id, TypeTree())
      case Apply(_, args) =>
        args foreach collect
      case Typed(expr, _) =>
        collect(expr)
      case NamedArg(_, arg) =>
        collect(arg)
      case SeqLiteral(elems, _) =>
        elems foreach collect
      case Alternative(trees) =>
        // variables are not allowed in pattern alternatives; report each one
        for (tree <- trees; (vble, _) <- getVariables(tree))
          ctx.error(IllegalVariableInPatternAlternative(), vble.pos)
      case Annotated(arg, _) =>
        collect(arg)
      case InterpolatedString(_, segments) =>
        segments foreach collect
      case InfixOp(left, _, right) =>
        collect(left)
        collect(right)
      case PrefixOp(_, od) =>
        collect(od)
      case Parens(tree) =>
        collect(tree)
      case Tuple(trees) =>
        trees foreach collect
      case _ =>
    }
    collect(tree)
    buf.toList
  }
+
+ private class IrrefutableGenFrom(pat: Tree, expr: Tree) extends GenFrom(pat, expr)
+}
diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
new file mode 100644
index 000000000..33aa87d8e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
@@ -0,0 +1,82 @@
+package dotty.tools.dotc
+package ast
+
+import core.Contexts.Context
+import core.Decorators._
+import util.Positions._
+import Trees.{MemberDef, DefTree}
+
/** Utility functions to go from typed to untyped ASTs */
object NavigateAST {

  /** The untyped tree corresponding to typed tree `tree` in the compilation
   *  unit specified by `ctx`.
   *  Throws an Error (with the closest loosely-matching path in the message)
   *  if no exactly matching untyped tree exists.
   */
  def toUntyped(tree: tpd.Tree)(implicit ctx: Context): untpd.Tree =
    untypedPath(tree, exactMatch = true) match {
      case (utree: untpd.Tree) :: _ =>
        utree
      case _ =>
        // Redo the search loosely, purely to produce a helpful error message.
        val loosePath = untypedPath(tree, exactMatch = false)
        throw new
          Error(i"""no untyped tree for $tree, pos = ${tree.pos}
                   |best matching path =\n$loosePath%\n====\n%
                   |path positions = ${loosePath.map(_.pos)}""")
    }

  /** The reverse path of untyped trees starting with a tree that closest matches
   *  `tree` and ending in the untyped tree at the root of the compilation unit
   *  specified by `ctx`.
   *  @param exactMatch If `true`, the path must start with a node that exactly
   *                    matches `tree`, or `Nil` is returned.
   *                    If `false` the path might start with a node enclosing
   *                    the logical position of `tree`.
   *  Note: A complication concerns member definitions. ValDefs and DefDefs
   *  have after desugaring a position that spans just the name of the symbol being
   *  defined and nothing else. So we look instead for an untyped tree approximating the
   *  envelope of the definition, and declare success if we find another DefTree.
   */
  def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(implicit ctx: Context): List[Positioned] =
    tree match {
      case tree: MemberDef[_] =>
        untypedPath(tree.pos) match {
          case path @ (last: DefTree[_]) :: _ => path
          case path if !exactMatch => path
          case _ => Nil
        }
      case _ =>
        untypedPath(tree.pos) match {
          case (path @ last :: _) if last.pos == tree.pos || !exactMatch => path
          case _ => Nil
        }
    }

  /** The reverse part of the untyped root of the compilation unit of `ctx` to
   *  position `pos`.
   */
  def untypedPath(pos: Position)(implicit ctx: Context): List[Positioned] =
    pathTo(pos, ctx.compilationUnit.untpdTree)


  /** The reverse path from node `from` to the node that closest encloses position `pos`,
   *  or `Nil` if no such path exists. If a non-empty path is returned it starts with
   *  the node closest enclosing `pos` and ends with `from`.
   */
  def pathTo(pos: Position, from: Positioned)(implicit ctx: Context): List[Positioned] = {
    // Scan the children exposed by the product iterator; stop at the first
    // child that extends the path.
    def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = {
      while (it.hasNext) {
        // `next()` with parens: Iterator.next is side-effecting (was bare `it.next`)
        val path1 = it.next() match {
          case p: Positioned => singlePath(p, path)
          case xs: List[_] => childPath(xs.iterator, path)
          case _ => path
        }
        if (path1 ne path) return path1
      }
      path
    }
    def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] =
      if (p.pos contains pos) childPath(p.productIterator, p :: path)
      else path
    singlePath(from, Nil)
  }
}
diff --git a/compiler/src/dotty/tools/dotc/ast/PluggableTransformers.scala b/compiler/src/dotty/tools/dotc/ast/PluggableTransformers.scala
new file mode 100644
index 000000000..a584230a2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/PluggableTransformers.scala
@@ -0,0 +1,105 @@
+package dotty.tools.dotc
+package ast
+
+
object PluggableTransformers {
// NOTE(review): the entire body below (including the ExampleTransformer at the
// end) sits inside a single block comment, so this object currently compiles
// to an empty object. The text is kept as a design sketch for a pluggable
// tree-transformer mechanism.
/*
  import Trees._, Contexts._

  abstract class PluggableTransformer[T] extends TreeTransformer[T, Context] {
    type PluginOp[-N <: Tree[T]] = N => Tree[T]

    private[this] var _ctx: Context = _
    private[this] var _oldTree: Tree[T] = _

    protected implicit def ctx: Context = _ctx
    protected def oldTree: Tree[T] = _oldTree
    protected def thisTransformer: PluggableTransformer[T] = this

    class PluginOps[-N <: Tree[T]](op: PluginOp[N], val next: Plugins) {
      def apply(tree: N, old: Tree[T], c: Context): Tree[T] = {
        val savedCtx = _ctx
        val savedOld = _oldTree
        try {
          op(tree)
        } finally {
          _oldTree = savedOld
          _ctx = savedCtx
        }
      }
    }

    val NoOp: PluginOp[Tree[T]] = identity
    val NoOps = new PluginOps(NoOp, null)

    class Plugins {
      def next: Plugins = null

      def processIdent: PluginOp[Ident[T]] = NoOp
      def processSelect: PluginOp[Select[T]] = NoOp

      val IdentOps: PluginOps[Ident[T]] = NoOps
      val SelectOps: PluginOps[Select[T]] = NoOps
    }

    val EmptyPlugin = new Plugins

    private[this] var _plugins: Plugins = EmptyPlugin

    override def plugins: Plugins = _plugins

    class Plugin extends Plugins {
      override val next = _plugins
      _plugins = this

      private def push[N <: Tree[T]](op: PluginOp[N], ops: => PluginOps[N]): PluginOps[N] =
        if (op == NoOp) ops else new PluginOps(op, next)

      override val IdentOps: PluginOps[Ident[T]] = push(processIdent, next.IdentOps)
      override val SelectOps: PluginOps[Select[T]] = push(processSelect, next.SelectOps)
    }

    def postIdent(tree: Ident[T], old: Tree[T], c: Context, ops: PluginOps[Ident[T]]) =
      if (ops eq NoOps) tree
      else finishIdent(ops(tree, old, c), old, c, ops.next)

    override def finishIdent(tree: Tree[T], old: Tree[T], c: Context, plugins: Plugins): Tree[T] = tree match {
      case tree: Ident[_] => postIdent(tree, old, c, plugins.IdentOps)
      case _ => postProcess(tree, old, c, plugins)
    }

    def postSelect(tree: Select[T], old: Tree[T], c: Context, ops: PluginOps[Select[T]]) =
      if (ops eq NoOps) tree
      else finishSelect(ops(tree, old, c), old, c, ops.next)

    override def finishSelect(tree: Tree[T], old: Tree[T], c: Context, plugins: Plugins): Tree[T] = tree match {
      case tree: Select[_] => postSelect(tree, old, c, plugins.SelectOps)
      case _ => postProcess(tree, old, c, plugins)
    }

    protected def postProcess(tree: Tree[T], old: Tree[T], c: Context, plugins: Plugins): Tree[T] = tree match {
      case tree: Ident[_] => finishIdent(tree, old, c, plugins)
      case tree: Select[_] => finishSelect(tree, old, c, plugins)
    }
  }
}

import PluggableTransformers._, Types._, Trees._, Contexts._

class ExampleTransformer extends PluggableTransformer[Type] {

  object ExamplePlugin extends Plugin {
    override def processIdent = {
      case tree @ Ident(x) if x.isTypeName => tree.derivedSelect(tree, x)
      case tree => tpd.Ident(???)
    }
    override def processSelect = { tree =>
      if (tree.isType) tree.derivedIdent(tree.name)
      else tpd.EmptyTree
    }
  }

  override def transform(tree: tpd.Tree, ctx: Context) =
    super.transform(tree, ctx)
*/
}
diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala
new file mode 100644
index 000000000..bb6817603
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala
@@ -0,0 +1,213 @@
+package dotty.tools.dotc
+package ast
+
+import util.Positions._
+import util.DotClass
+import core.Contexts.Context
+import core.Decorators._
+import core.Flags.JavaDefined
+import core.StdNames.nme
+
+/** A base class for things that have positions (currently: modifiers and trees)
+ */
+abstract class Positioned extends DotClass with Product {
+
+ private[this] var curPos: Position = _
+
+ setPos(initialPos)
+
+ /** The item's position.
+ */
+ def pos: Position = curPos
+
+ /** Destructively update `curPos` to given position. Also, set any missing
+ * positions in children.
+ */
+ protected def setPos(pos: Position): Unit = {
+ setPosUnchecked(pos)
+ if (pos.exists) setChildPositions(pos.toSynthetic)
+ }
+
+ /** A positioned item like this one with the position set to `pos`.
+ * if the positioned item is source-derived, a clone is returned.
+ * If the positioned item is synthetic, the position is updated
+ * destructively and the item itself is returned.
+ */
+ def withPos(pos: Position): this.type = {
+ val newpd = (if (pos == curPos || curPos.isSynthetic) this else clone).asInstanceOf[Positioned]
+ newpd.setPos(pos)
+ newpd.asInstanceOf[this.type]
+ }
+
+ def withPos(posd: Positioned): this.type =
+ if (posd == null) this else withPos(posd.pos)
+
+ /** This item with a position that's the union of the given `pos` and the
+ * current position.
+ */
+ def addPos(pos: Position): this.type = withPos(pos union this.pos)
+
+ /** Set position of this tree only, without performing
+ * any checks of consistency with - or updates of - other positions.
+ * Called from Unpickler when entering positions.
+ */
+ private[dotc] def setPosUnchecked(pos: Position) = curPos = pos
+
+ /** If any children of this node do not have positions,
+ * fit their positions between the positions of the known subtrees
+ * and transitively visit their children.
+ * The method is likely time-critical because it is invoked on any node
+ * we create, so we want to avoid object allocations in the common case.
+ * The method is naturally expressed as two mutually (tail-)recursive
+ * functions, one which computes the next element to consider or terminates if there
+ * is none and the other which propagates the position information to that element.
+ * But since mutual tail recursion is not supported in Scala, we express it instead
+ * as a while loop with a termination by return in the middle.
+ */
+ private def setChildPositions(pos: Position): Unit = {
+ var n = productArity // subnodes are analyzed right to left
+ var elems: List[Any] = Nil // children in lists still to be considered, from right to left
+ var end = pos.end // the last defined offset, fill in positions up to this offset
+ var outstanding: List[Positioned] = Nil // nodes that need their positions filled once a start position
+ // is known, from left to right.
+ def fillIn(ps: List[Positioned], start: Int, end: Int): Unit = ps match {
+ case p :: ps1 =>
+ p.setPos(Position(start, end))
+ fillIn(ps1, end, end)
+ case nil =>
+ }
+ while (true) {
+ var nextChild: Any = null // the next child to be considered
+ if (elems.nonEmpty) {
+ nextChild = elems.head
+ elems = elems.tail
+ }
+ else if (n > 0) {
+ n = n - 1
+ nextChild = productElement(n)
+ }
+ else {
+ fillIn(outstanding, pos.start, end)
+ return
+ }
+ nextChild match {
+ case p: Positioned =>
+ if (p.pos.exists) {
+ fillIn(outstanding, p.pos.end, end)
+ outstanding = Nil
+ end = p.pos.start
+ }
+ else outstanding = p :: outstanding
+ case xs: List[_] =>
+ elems = elems ::: xs.reverse
+ case _ =>
+ }
+ }
+ }
+
+ /** The initial, synthetic position. This is usually the union of all positioned children's positions.
+ */
+ def initialPos: Position = {
+ var n = productArity
+ var pos = NoPosition
+ while (n > 0) {
+ n -= 1
+ productElement(n) match {
+ case p: Positioned => pos = pos union p.pos
+ case xs: List[_] => pos = unionPos(pos, xs)
+ case _ =>
+ }
+ }
+ pos.toSynthetic
+ }
+
+ private def unionPos(pos: Position, xs: List[_]): Position = xs match {
+ case (p: Positioned) :: xs1 => unionPos(pos union p.pos, xs1)
+ case _ => pos
+ }
+
+ def contains(that: Positioned): Boolean = {
+ def isParent(x: Any): Boolean = x match {
+ case x: Positioned =>
+ x contains that
+ case xs: List[_] =>
+ xs exists isParent
+ case _ =>
+ false
+ }
+ (this eq that) ||
+ (this.pos contains that.pos) && {
+ var n = productArity
+ var found = false
+ while (!found && n > 0) {
+ n -= 1
+ found = isParent(productElement(n))
+ }
+ found
+ }
+ }
+
+ /** Check that all positioned items in this tree satisfy the following conditions:
+ * - Parent positions contain child positions
+ * - If item is a non-empty tree, it has a position
+ */
+ def checkPos(nonOverlapping: Boolean)(implicit ctx: Context): Unit = try {
+ import untpd._
+ var lastPositioned: Positioned = null
+ var lastPos = NoPosition
+ def check(p: Any): Unit = p match {
+ case p: Positioned =>
+ assert(pos contains p.pos,
+ s"""position error, parent position does not contain child positon
+ |parent = $this,
+ |parent position = $pos,
+ |child = $p,
+ |child position = ${p.pos}""".stripMargin)
+ p match {
+ case tree: Tree if !tree.isEmpty =>
+ assert(tree.pos.exists,
+ s"position error: position not set for $tree # ${tree.uniqueId}")
+ case _ =>
+ }
+ if (nonOverlapping) {
+ this match {
+ case _: WildcardFunction
+ if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] =>
+ // ignore transition from last wildcard parameter to body
+ case _ =>
+ assert(!lastPos.exists || !p.pos.exists || lastPos.end <= p.pos.start,
+ s"""position error, child positions overlap or in wrong order
+ |parent = $this
+ |1st child = $lastPositioned
+ |1st child position = $lastPos
+ |2nd child = $p
+ |2nd child position = ${p.pos}""".stripMargin)
+ }
+ lastPositioned = p
+ lastPos = p.pos
+ }
+ p.checkPos(nonOverlapping)
+ case xs: List[_] =>
+ xs.foreach(check)
+ case _ =>
+ }
+ this match {
+ case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) =>
+ // Special treatment for constructors coming from Java:
+ // Leave out tparams, they are copied with wrong positions from parent class
+ check(tree.mods)
+ check(tree.vparamss)
+ case _ =>
+ val end = productArity
+ var n = 0
+ while (n < end) {
+ check(productElement(n))
+ n += 1
+ }
+ }
+ } catch {
+ case ex: AssertionError =>
+ println(i"error while checking $this")
+ throw ex
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
new file mode 100644
index 000000000..d1e6bd38a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -0,0 +1,733 @@
+package dotty.tools
+package dotc
+package ast
+
+import core._
+import Flags._, Trees._, Types._, Contexts._
+import Names._, StdNames._, NameOps._, Decorators._, Symbols._
+import util.HashSet
+import typer.ConstFold
+
+trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] =>
+ import TreeInfo._
+
+ // Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc).
+ // But Scalac accepts the program happily without it. Need to find out why.
+
+ def unsplice[T >: Untyped](tree: Trees.Tree[T]): Trees.Tree[T] = tree.asInstanceOf[untpd.Tree] match {
+ case untpd.TypedSplice(tree1) => tree1.asInstanceOf[Trees.Tree[T]]
+ case _ => tree
+ }
+
+ def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match {
+ case DefDef(_, _, _, _, EmptyTree)
+ | ValDef(_, _, EmptyTree)
+ | TypeDef(_, _) => true
+ case _ => false
+ }
+
+ /** The largest subset of {NoInits, PureInterface} that a
+ * trait enclosing this statement can have as flags.
+ * Does tree contain an initialization part when seen as a member of a class or trait?
+ */
+ def defKind(tree: Tree): FlagSet = unsplice(tree) match {
+ case EmptyTree | _: Import => NoInitsInterface
+ case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface
+ case tree: DefDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else NoInits
+ case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags
+ case _ => EmptyFlags
+ }
+
+ def isOpAssign(tree: Tree) = unsplice(tree) match {
+ case Apply(fn, _ :: _) =>
+ unsplice(fn) match {
+ case Select(_, name) if name.isOpAssignmentName => true
+ case _ => false
+ }
+ case _ => false
+ }
+
+ class MatchingArgs(params: List[Symbol], args: List[Tree])(implicit ctx: Context) {
+ def foreach(f: (Symbol, Tree) => Unit): Boolean = {
+ def recur(params: List[Symbol], args: List[Tree]): Boolean = params match {
+ case Nil => args.isEmpty
+ case param :: params1 =>
+ if (param.info.isRepeatedParam) {
+ for (arg <- args) f(param, arg)
+ true
+ } else args match {
+ case Nil => false
+ case arg :: args1 =>
+ f(param, args.head)
+ recur(params1, args1)
+ }
+ }
+ recur(params, args)
+ }
+ def zipped: List[(Symbol, Tree)] = map((_, _))
+ def map[R](f: (Symbol, Tree) => R): List[R] = {
+ val b = List.newBuilder[R]
+ foreach(b += f(_, _))
+ b.result
+ }
+ }
+
+ /** The method part of an application node, possibly enclosed in a block
+ * with only valdefs as statements. the reason for also considering blocks
+ * is that named arguments can transform a call into a block, e.g.
+ * <init>(b = foo, a = bar)
+ * is transformed to
+ * { val x$1 = foo
+ * val x$2 = bar
+ * <init>(x$2, x$1)
+ * }
+ */
+ def methPart(tree: Tree): Tree = stripApply(tree) match {
+ case TypeApply(fn, _) => methPart(fn)
+ case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed
+ case Block(stats, expr) => methPart(expr)
+ case mp => mp
+ }
+
+ /** If this is an application, its function part, stripping all
+ * Apply nodes (but leaving TypeApply nodes in). Otherwise the tree itself.
+ */
+ def stripApply(tree: Tree): Tree = unsplice(tree) match {
+ case Apply(fn, _) => stripApply(fn)
+ case _ => tree
+ }
+
+ /** The number of arguments in an application */
+ def numArgs(tree: Tree): Int = unsplice(tree) match {
+ case Apply(fn, args) => numArgs(fn) + args.length
+ case TypeApply(fn, _) => numArgs(fn)
+ case Block(_, expr) => numArgs(expr)
+ case _ => 0
+ }
+
+ /** The (last) list of arguments of an application */
+ def arguments(tree: Tree): List[Tree] = unsplice(tree) match {
+ case Apply(_, args) => args
+ case TypeApply(fn, _) => arguments(fn)
+ case Block(_, expr) => arguments(expr)
+ case _ => Nil
+ }
+
+ /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
+ * same object?
+ */
+ def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match {
+ case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true
+ case _ => false
+ }
+
+ /** Is tree a super constructor call?
+ */
+ def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match {
+ case Select(Super(_, _), nme.CONSTRUCTOR) => true
+ case _ => false
+ }
+
+ def isSuperSelection(tree: untpd.Tree) = unsplice(tree) match {
+ case Select(Super(_, _), _) => true
+ case _ => false
+ }
+
+ def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match {
+ case Ident(nme.CONSTRUCTOR)
+ | Select(This(_), nme.CONSTRUCTOR)
+ | Select(Super(_, _), nme.CONSTRUCTOR) => true
+ case _ => false
+ }
+
+ /** Is tree a variable pattern? */
+ def isVarPattern(pat: untpd.Tree): Boolean = unsplice(pat) match {
+ case x: BackquotedIdent => false
+ case x: Ident => x.name.isVariableName
+ case _ => false
+ }
+
+ /** The first constructor definition in `stats` */
+ def firstConstructor(stats: List[Tree]): Tree = stats match {
+ case (meth: DefDef) :: _ if meth.name.isConstructorName => meth
+ case stat :: stats => firstConstructor(stats)
+ case nil => EmptyTree
+ }
+
+ /** The arguments to the first constructor in `stats`. */
+ def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
+ case DefDef(_, _, args :: _, _, _) => args
+ case _ => Nil
+ }
+
+ /** Is tpt a vararg type of the form T* or => T*? */
+ def isRepeatedParamType(tpt: Tree)(implicit ctx: Context): Boolean = tpt match {
+ case ByNameTypeTree(tpt1) => isRepeatedParamType(tpt1)
+ case tpt: TypeTree => tpt.typeOpt.isRepeatedParam
+ case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true
+ case _ => false
+ }
+
+ /** Is name a left-associative operator? */
+ def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.last != ':')
+
+ /** can this type be a type pattern? */
+ def mayBeTypePat(tree: untpd.Tree): Boolean = unsplice(tree) match {
+ case AndTypeTree(tpt1, tpt2) => mayBeTypePat(tpt1) || mayBeTypePat(tpt2)
+ case OrTypeTree(tpt1, tpt2) => mayBeTypePat(tpt1) || mayBeTypePat(tpt2)
+ case RefinedTypeTree(tpt, refinements) => mayBeTypePat(tpt) || refinements.exists(_.isInstanceOf[Bind])
+ case AppliedTypeTree(tpt, args) => mayBeTypePat(tpt) || args.exists(_.isInstanceOf[Bind])
+ case Select(tpt, _) => mayBeTypePat(tpt)
+ case Annotated(tpt, _) => mayBeTypePat(tpt)
+ case _ => false
+ }
+
+ /** Is this argument node of the form <expr> : _* ?
+ */
+ def isWildcardStarArg(tree: Tree)(implicit ctx: Context): Boolean = unbind(tree) match {
+ case Typed(Ident(nme.WILDCARD_STAR), _) => true
+ case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true
+ case Typed(_, tpt: TypeTree) => tpt.hasType && tpt.tpe.isRepeatedParam
+ case _ => false
+ }
+
+ /** If this tree has type parameters, those. Otherwise Nil.
+ def typeParameters(tree: Tree): List[TypeDef] = tree match {
+ case DefDef(_, _, tparams, _, _, _) => tparams
+ case ClassDef(_, _, tparams, _) => tparams
+ case TypeDef(_, _, tparams, _) => tparams
+ case _ => Nil
+ }*/
+
+ /** Does this argument list end with an argument of the form <expr> : _* ? */
+ def isWildcardStarArgList(trees: List[Tree])(implicit ctx: Context) =
+ trees.nonEmpty && isWildcardStarArg(trees.last)
+
+ /** Is the argument a wildcard argument of the form `_` or `x @ _`?
+ */
+ def isWildcardArg(tree: Tree): Boolean = unbind(tree) match {
+ case Ident(nme.WILDCARD) => true
+ case _ => false
+ }
+
+ /** Does this list contain a named argument tree? */
+ def hasNamedArg(args: List[Any]) = args exists isNamedArg
+ val isNamedArg = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]]
+
+ /** Is this pattern node a catch-all (wildcard or variable) pattern? */
+ def isDefaultCase(cdef: CaseDef) = cdef match {
+ case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat)
+ case _ => false
+ }
+
+  /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
+ * whether the user provided cases are exhaustive. */
+ def isSyntheticDefaultCase(cdef: CaseDef) = unsplice(cdef) match {
+ case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
+ case _ => false
+ }
+
+ /** Does this CaseDef catch Throwable? */
+ def catchesThrowable(cdef: CaseDef)(implicit ctx: Context) =
+ catchesAllOf(cdef, defn.ThrowableType)
+
+ /** Does this CaseDef catch everything of a certain Type? */
+ def catchesAllOf(cdef: CaseDef, threshold: Type)(implicit ctx: Context) =
+ isDefaultCase(cdef) ||
+ cdef.guard.isEmpty && {
+ unbind(cdef.pat) match {
+ case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt
+ case _ => false
+ }
+ }
+
+ /** Is this case guarded? */
+ def isGuardedCase(cdef: CaseDef) = cdef.guard ne EmptyTree
+
+ /** The underlying pattern ignoring any bindings */
+ def unbind(x: Tree): Tree = unsplice(x) match {
+ case Bind(_, y) => unbind(y)
+ case y => y
+ }
+
+ /** Checks whether predicate `p` is true for all result parts of this expression,
+ * where we zoom into Ifs, Matches, and Blocks.
+ */
+ def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match {
+ case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p)
+ case Match(_, cases) => cases forall (c => forallResults(c.body, p))
+ case Block(_, expr) => forallResults(expr, p)
+ case _ => p(tree)
+ }
+}
+
+trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] =>
+ import TreeInfo._
+ import untpd._
+
+ /** True iff definition is a val or def with no right-hand-side, or it
+   * is an abstract type declaration
+ */
+ def lacksDefinition(mdef: MemberDef)(implicit ctx: Context) = mdef match {
+ case mdef: ValOrDefDef =>
+ mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor)
+ case mdef: TypeDef =>
+ def isBounds(rhs: Tree): Boolean = rhs match {
+ case _: TypeBoundsTree => true
+ case PolyTypeTree(_, body) => isBounds(body)
+ case _ => false
+ }
+ mdef.rhs.isEmpty || isBounds(mdef.rhs)
+ case _ => false
+ }
+
+ def isFunctionWithUnknownParamType(tree: Tree) = tree match {
+ case Function(args, _) =>
+ args.exists {
+ case ValDef(_, tpt, _) => tpt.isEmpty
+ case _ => false
+ }
+ case _ => false
+ }
+
+ // todo: fill with other methods from TreeInfo that only apply to untpd.Tree's
+}
+
+trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
+ import TreeInfo._
+ import tpd._
+
+ /** The purity level of this statement.
+ * @return pure if statement has no side effects
+ * idempotent if running the statement a second time has no side effects
+ * impure otherwise
+ */
+ private def statPurity(tree: Tree)(implicit ctx: Context): PurityLevel = unsplice(tree) match {
+ case EmptyTree
+ | TypeDef(_, _)
+ | Import(_, _)
+ | DefDef(_, _, _, _, _) =>
+ Pure
+ case vdef @ ValDef(_, _, _) =>
+ if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs)
+ case _ =>
+ Impure
+ // TODO: It seem like this should be exprPurity(tree)
+ // But if we do that the repl/vars test break. Need to figure out why that's the case.
+ }
+
+ /** The purity level of this expression.
+ * @return pure if expression has no side effects
+ * idempotent if running the expression a second time has no side effects
+ * impure otherwise
+ *
+ * Note that purity and idempotency are different. References to modules and lazy
+ * vals are impure (side-effecting) both because side-effecting code may be executed and because the first reference
+ * takes a different code path than all to follow; but they are idempotent
+ * because running the expression a second time gives the cached result.
+ */
+ private def exprPurity(tree: Tree)(implicit ctx: Context): PurityLevel = unsplice(tree) match {
+ case EmptyTree
+ | This(_)
+ | Super(_, _)
+ | Literal(_)
+ | Closure(_, _, _) =>
+ Pure
+ case Ident(_) =>
+ refPurity(tree)
+ case Select(qual, _) =>
+ refPurity(tree).min(exprPurity(qual))
+ case TypeApply(fn, _) =>
+ exprPurity(fn)
+/*
+ * Not sure we'll need that. Comment out until we find out
+ case Apply(Select(free @ Ident(_), nme.apply), _) if free.symbol.name endsWith nme.REIFY_FREE_VALUE_SUFFIX =>
+ // see a detailed explanation of this trick in `GenSymbols.reifyFreeTerm`
+ free.symbol.hasStableFlag && isIdempotentExpr(free)
+*/
+ case Apply(fn, args) =>
+ def isKnownPureOp(sym: Symbol) =
+ sym.owner.isPrimitiveValueClass || sym.owner == defn.StringClass
+ // Note: After uncurry, field accesses are represented as Apply(getter, Nil),
+ // so an Apply can also be pure.
+ if (args.isEmpty && fn.symbol.is(Stable)) exprPurity(fn)
+ else if (tree.tpe.isInstanceOf[ConstantType] && isKnownPureOp(tree.symbol))
+ // A constant expression with pure arguments is pure.
+ minOf(exprPurity(fn), args.map(exprPurity))
+ else Impure
+ case Typed(expr, _) =>
+ exprPurity(expr)
+ case Block(stats, expr) =>
+ minOf(exprPurity(expr), stats.map(statPurity))
+ case _ =>
+ Impure
+ }
+
+ private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = (l0 /: ls)(_ min _)
+
+ def isPureExpr(tree: Tree)(implicit ctx: Context) = exprPurity(tree) == Pure
+ def isIdempotentExpr(tree: Tree)(implicit ctx: Context) = exprPurity(tree) >= Idempotent
+
+ /** The purity level of this reference.
+ * @return
+ * pure if reference is (nonlazy and stable) or to a parameterized function
+ * idempotent if reference is lazy and stable
+ * impure otherwise
+ * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable
+ * flags set.
+ */
+ private def refPurity(tree: Tree)(implicit ctx: Context): PurityLevel =
+ if (!tree.tpe.widen.isParameterless) Pure
+ else if (!tree.symbol.isStable) Impure
+    else if (tree.symbol.is(Lazy)) Idempotent // TODO add Module flag, since Module vals are not Lazy from the start.
+ else Pure
+
+ def isPureRef(tree: Tree)(implicit ctx: Context) =
+ refPurity(tree) == Pure
+ def isIdempotentRef(tree: Tree)(implicit ctx: Context) =
+ refPurity(tree) >= Idempotent
+
+ /** If `tree` is a constant expression, its value as a Literal,
+ * or `tree` itself otherwise.
+ *
+ * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose.
+ * Example
+ *
+ * object O { final val x = 42; println("43") }
+ * O.x
+ *
+ * Strictly speaking we can't replace `O.x` with `42`. But this would make
+ * most expressions non-constant. Maybe we can change the spec to accept this
+ * kind of eliding behavior. Or else enforce true purity in the compiler.
+ * The choice will be affected by what we will do with `inline` and with
+ * Singleton type bounds (see SIP 23). Presumably
+ *
+ * object O1 { val x: Singleton = 42; println("43") }
+ * object O2 { inline val x = 42; println("43") }
+ *
+ * should behave differently.
+ *
+ * O1.x should have the same effect as { println("43"); 42 }
+ *
+ * whereas
+ *
+ * O2.x = 42
+ *
+ * Revisit this issue once we have implemented `inline`. Then we can demand
+ * purity of the prefix unless the selection goes to an inline val.
+ *
+ * Note: This method should be applied to all term tree nodes that are not literals,
+ * that can be idempotent, and that can have constant types. So far, only nodes
+ * of the following classes qualify:
+ *
+ * Ident
+ * Select
+ * TypeApply
+ */
+ def constToLiteral(tree: Tree)(implicit ctx: Context): Tree = {
+ val tree1 = ConstFold(tree)
+ tree1.tpe.widenTermRefExpr match {
+ case ConstantType(value) if isIdempotentExpr(tree1) => Literal(value)
+ case _ => tree1
+ }
+ }
+
+ /** Is symbol potentially a getter of a mutable variable?
+ */
+ def mayBeVarGetter(sym: Symbol)(implicit ctx: Context): Boolean = {
+ def maybeGetterType(tpe: Type): Boolean = tpe match {
+ case _: ExprType | _: ImplicitMethodType => true
+ case tpe: PolyType => maybeGetterType(tpe.resultType)
+ case _ => false
+ }
+ sym.owner.isClass && !sym.isStable && maybeGetterType(sym.info)
+ }
+
+ /** Is tree a reference to a mutable variable, or to a potential getter
+ * that has a setter in the same class?
+ */
+ def isVariableOrGetter(tree: Tree)(implicit ctx: Context) = {
+ def sym = tree.symbol
+ def isVar = sym is Mutable
+ def isGetter =
+ mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists
+
+ unsplice(tree) match {
+ case Ident(_) => isVar
+ case Select(_, _) => isVar || isGetter
+ case Apply(_, _) =>
+ methPart(tree) match {
+ case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists
+ case _ => false
+ }
+ case _ => false
+ }
+ }
+
+ /** Is tree a `this` node which belongs to `enclClass`? */
+ def isSelf(tree: Tree, enclClass: Symbol)(implicit ctx: Context): Boolean = unsplice(tree) match {
+ case This(_) => tree.symbol == enclClass
+ case _ => false
+ }
+
+ /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */
+ def stripCast(tree: Tree)(implicit ctx: Context): Tree = {
+ def isCast(sel: Tree) = sel.symbol == defn.Any_asInstanceOf
+ unsplice(tree) match {
+ case TypeApply(sel @ Select(inner, _), _) if isCast(sel) =>
+ stripCast(inner)
+ case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) =>
+ stripCast(inner)
+ case t =>
+ t
+ }
+ }
+
+ /** Decompose a call fn[targs](vargs_1)...(vargs_n)
+ * into its constituents (where targs, vargss may be empty)
+ */
+ def decomposeCall(tree: Tree): (Tree, List[Tree], List[List[Tree]]) = tree match {
+ case Apply(fn, args) =>
+ val (meth, targs, argss) = decomposeCall(fn)
+ (meth, targs, argss :+ args)
+ case TypeApply(fn, targs) =>
+ val (meth, Nil, Nil) = decomposeCall(fn)
+ (meth, targs, Nil)
+ case _ =>
+ (tree, Nil, Nil)
+ }
+
+ /** An extractor for closures, either contained in a block or standalone.
+ */
+ object closure {
+ def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match {
+ case Block(_, Closure(env, meth, tpt)) => Some(env, meth, tpt)
+ case Closure(env, meth, tpt) => Some(env, meth, tpt)
+ case _ => None
+ }
+ }
+
+ /** If tree is a closure, its body, otherwise tree itself */
+ def closureBody(tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case Block((meth @ DefDef(nme.ANON_FUN, _, _, _, _)) :: Nil, Closure(_, _, _)) => meth.rhs
+ case _ => tree
+ }
+
+ /** The variables defined by a pattern, in reverse order of their appearance. */
+ def patVars(tree: Tree)(implicit ctx: Context): List[Symbol] = {
+ val acc = new TreeAccumulator[List[Symbol]] {
+ def apply(syms: List[Symbol], tree: Tree)(implicit ctx: Context) = tree match {
+ case Bind(_, body) => apply(tree.symbol :: syms, body)
+ case _ => foldOver(syms, tree)
+ }
+ }
+ acc(Nil, tree)
+ }
+
+ /** Is this pattern node a catch-all or type-test pattern? */
+ def isCatchCase(cdef: CaseDef)(implicit ctx: Context) = cdef match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) =>
+ isSimpleThrowable(tpt.tpe)
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) =>
+ isSimpleThrowable(tpt.tpe)
+ case _ =>
+ isDefaultCase(cdef)
+ }
+
+ private def isSimpleThrowable(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp @ TypeRef(pre, _) =>
+ (pre == NoPrefix || pre.widen.typeSymbol.isStatic) &&
+ (tp.symbol derivesFrom defn.ThrowableClass) && !(tp.symbol is Trait)
+ case _ =>
+ false
+ }
+
+ /** The symbols defined locally in a statement list */
+ def localSyms(stats: List[Tree])(implicit ctx: Context): List[Symbol] =
+ for (stat <- stats if stat.isDef && stat.symbol.exists) yield stat.symbol
+
+ /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */
+ def definedSym(tree: Tree)(implicit ctx: Context): Symbol =
+ if (tree.isDef) tree.symbol else NoSymbol
+
+ /** Going from child to parent, the path of tree nodes that starts
+ * with a definition of symbol `sym` and ends with `root`, or Nil
+ * if no such path exists.
+ * Pre: `sym` must have a position.
+ */
+ def defPath(sym: Symbol, root: Tree)(implicit ctx: Context): List[Tree] = ctx.debugTraceIndented(s"defpath($sym with position ${sym.pos}, ${root.show})") {
+ require(sym.pos.exists)
+ object accum extends TreeAccumulator[List[Tree]] {
+ def apply(x: List[Tree], tree: Tree)(implicit ctx: Context): List[Tree] = {
+ if (tree.pos.contains(sym.pos))
+ if (definedSym(tree) == sym) tree :: x
+ else {
+ val x1 = foldOver(x, tree)
+ if (x1 ne x) tree :: x1 else x1
+ }
+ else x
+ }
+ }
+ accum(Nil, root)
+ }
+
+
+ /** The top level classes in this tree, including only those module classes that
+ * are not a linked class of some other class in the result.
+ */
+ def topLevelClasses(tree: Tree)(implicit ctx: Context): List[ClassSymbol] = tree match {
+ case PackageDef(_, stats) => stats.flatMap(topLevelClasses)
+ case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil
+ case _ => Nil
+ }
+
+ /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */
+ def sliceTopLevel(tree: Tree, cls: ClassSymbol)(implicit ctx: Context): List[Tree] = tree match {
+ case PackageDef(pid, stats) =>
+ cpy.PackageDef(tree)(pid, stats.flatMap(sliceTopLevel(_, cls))) :: Nil
+ case tdef: TypeDef =>
+ val sym = tdef.symbol
+ assert(sym.isClass)
+ if (cls == sym || cls == sym.linkedClass) tdef :: Nil
+ else Nil
+ case vdef: ValDef =>
+ val sym = vdef.symbol
+ assert(sym is Module)
+ if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil
+ else Nil
+ case tree =>
+ tree :: Nil
+ }
+
+ /** The statement sequence that contains a definition of `sym`, or Nil
+ * if none was found.
+   * For a tree to be found, the symbol must have a position and its definition
+   * tree must be reachable from some tree stored in an enclosing context.
+ */
+ def definingStats(sym: Symbol)(implicit ctx: Context): List[Tree] =
+ if (!sym.pos.exists || (ctx eq NoContext) || ctx.compilationUnit == null) Nil
+ else defPath(sym, ctx.compilationUnit.tpdTree) match {
+ case defn :: encl :: _ =>
+ def verify(stats: List[Tree]) =
+ if (stats exists (definedSym(_) == sym)) stats else Nil
+ encl match {
+ case Block(stats, _) => verify(stats)
+ case encl: Template => verify(encl.body)
+ case PackageDef(_, stats) => verify(stats)
+ case _ => Nil
+ }
+ case nil =>
+ Nil
+ }
+}
+
+object TreeInfo {
+ class PurityLevel(val x: Int) extends AnyVal {
+ def >= (that: PurityLevel) = x >= that.x
+ def min(that: PurityLevel) = new PurityLevel(x min that.x)
+ }
+
+ val Pure = new PurityLevel(2)
+ val Idempotent = new PurityLevel(1)
+ val Impure = new PurityLevel(0)
+}
+
+ /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe)
+ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation defn.SwitchClass
+ */
+
+ /** Does list of trees start with a definition of
+   * a class or module with given name (ignoring imports)
+ def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
+ case Import(_, _) :: xs => firstDefinesClassOrObject(xs, name)
+ case Annotated(_, tree1) :: Nil => firstDefinesClassOrObject(List(tree1), name)
+ case ModuleDef(_, `name`, _) :: Nil => true
+ case ClassDef(_, `name`, _, _) :: Nil => true
+ case _ => false
+ }
+
+
+ /** Is this file the body of a compilation unit which should not
+ * have Predef imported?
+ */
+ def noPredefImportForUnit(body: Tree) = {
+ // Top-level definition whose leading imports include Predef.
+ def isLeadingPredefImport(defn: Tree): Boolean = defn match {
+ case PackageDef(_, defs1) => defs1 exists isLeadingPredefImport
+ case Import(expr, _) => isReferenceToPredef(expr)
+ case _ => false
+ }
+ // Compilation unit is class or object 'name' in package 'scala'
+ def isUnitInScala(tree: Tree, name: Name) = tree match {
+ case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name)
+ case _ => false
+ }
+
+ isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body)
+ }
+ */
+
+ /*
+ def isAbsTypeDef(tree: Tree) = tree match {
+ case TypeDef(_, _, _, TypeBoundsTree(_, _)) => true
+ case TypeDef(_, _, _, rhs) => rhs.tpe.isInstanceOf[TypeBounds]
+ case _ => false
+ }
+
+ def isAliasTypeDef(tree: Tree) = tree match {
+ case TypeDef(_, _, _, _) => !isAbsTypeDef(tree)
+ case _ => false
+ }
+
+ /** Some handy extractors for spotting trees through the
+ * the haze of irrelevant braces: i.e. Block(Nil, SomeTree)
+ * should not keep us from seeing SomeTree.
+ */
+ abstract class SeeThroughBlocks[T] {
+ protected def unapplyImpl(x: Tree): T
+ def unapply(x: Tree): T = x match {
+ case Block(Nil, expr) => unapply(expr)
+ case _ => unapplyImpl(x)
+ }
+ }
+ object IsTrue extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x match {
+ case Literal(Constant(true)) => true
+ case _ => false
+ }
+ }
+ object IsFalse extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x match {
+ case Literal(Constant(false)) => true
+ case _ => false
+ }
+ }
+ object IsIf extends SeeThroughBlocks[Option[(Tree, Tree, Tree)]] {
+ protected def unapplyImpl(x: Tree) = x match {
+ case If(cond, thenp, elsep) => Some((cond, thenp, elsep))
+ case _ => None
+ }
+ }
+
+ object MacroImplReference {
+ private def refPart(tree: Tree): Tree = tree match {
+ case TypeApply(fun, _) => refPart(fun)
+ case ref: RefTree => ref
+ case _ => EmptyTree()
+ }
+
+ def unapply(tree: Tree) = refPart(tree) match {
+ case ref: RefTree => Some((ref.qualifier.symbol, ref.symbol, dissectApplied(tree).targs))
+ case _ => None
+ }
+ }
+
+ def isNullaryInvocation(tree: Tree): Boolean =
+ tree.symbol != null && tree.symbol.isMethod && (tree match {
+ case TypeApply(fun, _) => isNullaryInvocation(fun)
+ case tree: RefTree => true
+ case _ => false
+ })*/
+
+
+
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
new file mode 100644
index 000000000..cf529dfda
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
@@ -0,0 +1,187 @@
+package dotty.tools
+package dotc
+package ast
+
+import core._
+import Types._, Contexts._, Constants._, Names._, Flags._
+import SymDenotations._, Symbols._, Annotations._, Trees._, Symbols._
+import Denotations._, Decorators._
+import dotty.tools.dotc.transform.SymUtils._
+
+/** A map that applies three functions and a substitution together to a tree and
+ * makes sure they are coordinated so that the result is well-typed. The functions are
+ * @param typeMap A function from Type to Type that gets applied to the
+ * type of every tree node and to all locally defined symbols,
+ * followed by the substitution [substFrom := substTo].
+ * @param treeMap A transformer that translates all encountered subtrees in
+ * prefix traversal order
+ * @param oldOwners Previous owners. If a top-level local symbol in the mapped tree
+ * has one of these as an owner, the owner is replaced by the corresponding
+ * symbol in `newOwners`.
+ * @param newOwners New owners, replacing previous owners.
+ * @param substFrom The symbols that need to be substituted.
+ * @param substTo The substitution targets.
+ *
+ * The reason the substitution is broken out from the rest of the type map is
+ * that all symbols have to be substituted at the same time. If we do not do this,
+ * we risk data races on named types. Example: Say we have `outer#1.inner#2` and we
+ * have two substitutions S1 = [outer#1 := outer#3], S2 = [inner#2 := inner#4] where
+ * hashtags precede symbol ids. If we do S1 first, we get outer#2.inner#3. If we then
+ * do S2 we get outer#2.inner#4. But that means that the named type outer#2.inner
+ * gets two different denotations in the same period. Hence, if -Yno-double-bindings is
+ * set, we would get a data race assertion error.
+ */
+final class TreeTypeMap(
+ val typeMap: Type => Type = IdentityTypeMap,
+ val treeMap: tpd.Tree => tpd.Tree = identity _,
+ val oldOwners: List[Symbol] = Nil,
+ val newOwners: List[Symbol] = Nil,
+ val substFrom: List[Symbol] = Nil,
+ val substTo: List[Symbol] = Nil)(implicit ctx: Context) extends tpd.TreeMap {
+ import tpd._
+
+ /** If `sym` is one of `oldOwners`, replace by corresponding symbol in `newOwners` */
+ def mapOwner(sym: Symbol) = sym.subst(oldOwners, newOwners)
+
+ /** Replace occurrences of `This(oldOwner)` in some prefix of a type
+ * by the corresponding `This(newOwner)`.
+ */
+ private val mapOwnerThis = new TypeMap {
+ private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match {
+ case Nil => tp
+ // Only class owners contribute a `This`-prefix; other owners are skipped
+ // but their position in `to` is consumed to keep the two lists aligned.
+ case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType))
+ case _ :: from1 => mapPrefix(from1, to.tail, tp)
+ }
+ def apply(tp: Type): Type = tp match {
+ case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix))
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** The full type mapping applied to `tp`: first `typeMap`, then the
+ * simultaneous symbol substitution [substFrom := substTo], then the
+ * owner-`This` prefix correction.
+ */
+ def mapType(tp: Type) =
+ mapOwnerThis(typeMap(tp).substSym(substFrom, substTo))
+
+ /** For each member definition in `prevStats` whose symbol differs from the
+ * symbol of the corresponding tree in `newStats`, replace the old symbol
+ * by the new one in the declaration scope of the new symbol's owner class.
+ * Assumes the two lists have equal length and align element-wise.
+ */
+ private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit =
+ if (prevStats.isEmpty) assert(newStats.isEmpty)
+ else {
+ prevStats.head match {
+ case pdef: MemberDef =>
+ val prevSym = pdef.symbol
+ val newSym = newStats.head.symbol
+ val newCls = newSym.owner.asClass
+ if (prevSym != newSym) newCls.replace(prevSym, newSym)
+ case _ =>
+ }
+ updateDecls(prevStats.tail, newStats.tail)
+ }
+
+ /** Transform `tree`: first apply `treeMap`, then map types and recurse.
+ * Trees that define local symbols (templates, def/type params, block
+ * statements, inlined bindings, pattern variables) first extend this map
+ * with mapped versions of those symbols, so that definitions and their
+ * uses are rewritten consistently by the same (extended) map.
+ */
+ override def transform(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = treeMap(tree) match {
+ case impl @ Template(constr, parents, self, _) =>
+ val tmap = withMappedSyms(localSyms(impl :: self :: Nil))
+ cpy.Template(impl)(
+ constr = tmap.transformSub(constr),
+ parents = parents mapconserve transform,
+ self = tmap.transformSub(self),
+ body = impl.body mapconserve
+ (tmap.transform(_)(ctx.withOwner(mapOwner(impl.symbol.owner))))
+ ).withType(tmap.mapType(impl.tpe))
+ case tree1 =>
+ tree1.withType(mapType(tree1.tpe)) match {
+ // An Ident whose mapped type now needs a prefix is re-created as a select.
+ case id: Ident if tpd.needsSelect(id.tpe) =>
+ ref(id.tpe.asInstanceOf[TermRef]).withPos(id.pos)
+ case ddef @ DefDef(name, tparams, vparamss, tpt, _) =>
+ val (tmap1, tparams1) = transformDefs(ddef.tparams)
+ val (tmap2, vparamss1) = tmap1.transformVParamss(vparamss)
+ val res = cpy.DefDef(ddef)(name, tparams1, vparamss1, tmap2.transform(tpt), tmap2.transform(ddef.rhs))
+ // Keep any body annotation in sync with the transformed right-hand side.
+ res.symbol.transformAnnotations {
+ case ann: BodyAnnotation => ann.derivedAnnotation(res.rhs)
+ case ann => ann
+ }
+ res
+ case blk @ Block(stats, expr) =>
+ val (tmap1, stats1) = transformDefs(stats)
+ val expr1 = tmap1.transform(expr)
+ cpy.Block(blk)(stats1, expr1)
+ case inlined @ Inlined(call, bindings, expanded) =>
+ val (tmap1, bindings1) = transformDefs(bindings)
+ val expanded1 = tmap1.transform(expanded)
+ cpy.Inlined(inlined)(call, bindings1, expanded1)
+ case cdef @ CaseDef(pat, guard, rhs) =>
+ val tmap = withMappedSyms(patVars(pat))
+ val pat1 = tmap.transform(pat)
+ val guard1 = tmap.transform(guard)
+ val rhs1 = tmap.transform(rhs)
+ cpy.CaseDef(cdef)(pat1, guard1, rhs1)
+ case tree1 =>
+ super.transform(tree1)
+ }
+ }
+
+ override def transformStats(trees: List[tpd.Tree])(implicit ctx: Context) =
+ transformDefs(trees)._2
+
+ /** Map the symbols defined by `trees`, then transform the trees with the
+ * extended map; returns that map together with the transformed trees.
+ */
+ private def transformDefs[TT <: tpd.Tree](trees: List[TT])(implicit ctx: Context): (TreeTypeMap, List[TT]) = {
+ val tmap = withMappedSyms(tpd.localSyms(trees))
+ (tmap, tmap.transformSub(trees))
+ }
+
+ /** Transform value-parameter lists left to right, threading the
+ * successively extended map through the remaining lists.
+ */
+ private def transformVParamss(vparamss: List[List[ValDef]]): (TreeTypeMap, List[List[ValDef]]) = vparamss match {
+ case vparams :: rest =>
+ val (tmap1, vparams1) = transformDefs(vparams)
+ val (tmap2, vparamss2) = tmap1.transformVParamss(rest)
+ (tmap2, vparams1 :: vparamss2)
+ case nil =>
+ (this, vparamss)
+ }
+
+ /** Transform `tree`, preserving its static tree type. */
+ def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree]
+
+ /** Map the tree carried by annotation `annot`. */
+ def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree))
+
+ /** The current tree map composed with a substitution [from -> to] */
+ def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap =
+ if (from eq to) this
+ else {
+ // assert that substitution stays idempotent, assuming its parts are
+ // TODO: It might be better to cater for the asserted-away conditions, by
+ // setting up a proper substitution abstraction with a compose operator that
+ // guarantees idempotence. But this might be too inefficient in some cases.
+ // We'll cross that bridge when we need to.
+ assert(!from.exists(substTo contains _))
+ assert(!to.exists(substFrom contains _))
+ assert(!from.exists(newOwners contains _))
+ assert(!to.exists(oldOwners contains _))
+ new TreeTypeMap(
+ typeMap,
+ treeMap,
+ from ++ oldOwners,
+ to ++ newOwners,
+ from ++ substFrom,
+ to ++ substTo)
+ }
+
+ /** Apply `typeMap` and `ownerMap` to given symbols `syms`
+ * and return a treemap that contains the substitution
+ * between original and mapped symbols.
+ */
+ def withMappedSyms(syms: List[Symbol], mapAlways: Boolean = false): TreeTypeMap =
+ withMappedSyms(syms, ctx.mapSymbols(syms, this, mapAlways))
+
+ /** The tree map with the substitution between originals `syms`
+ * and mapped symbols `mapped`. Also goes into mapped classes
+ * and substitutes their declarations.
+ */
+ def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = {
+ val symsChanged = syms ne mapped
+ val substMap = withSubstitution(syms, mapped)
+ // Fold over the mapped classes, mapping their declarations as well and
+ // patching each class scope when any symbols actually changed.
+ val fullMap = (substMap /: mapped.filter(_.isClass)) { (tmap, cls) =>
+ val origDcls = cls.info.decls.toList
+ val mappedDcls = ctx.mapSymbols(origDcls, tmap)
+ val tmap1 = tmap.withMappedSyms(origDcls, mappedDcls)
+ if (symsChanged) (origDcls, mappedDcls).zipped.foreach(cls.asClass.replace)
+ tmap1
+ }
+ // NOTE(review): when `syms` were unchanged but mapping class decls produced
+ // an extended map, re-run with forced mapping — presumably to get fresh
+ // symbols consistently; confirm against `ctx.mapSymbols` semantics.
+ if (symsChanged || (fullMap eq substMap)) fullMap
+ else withMappedSyms(syms, mapAlways = true)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala
new file mode 100644
index 000000000..2801bcae2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala
@@ -0,0 +1,1295 @@
+package dotty.tools
+package dotc
+package ast
+
+import core._
+import Types._, Names._, Flags._, util.Positions._, Contexts._, Constants._
+import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._
+import annotation.tailrec
+import language.higherKinds
+import collection.IndexedSeqOptimized
+import collection.immutable.IndexedSeq
+import collection.mutable.ListBuffer
+import parsing.Tokens.Token
+import printing.Printer
+import util.{Stats, Attachment, Property, DotClass}
+import annotation.unchecked.uncheckedVariance
+import language.implicitConversions
+
+object Trees {
+
+ // Note: it would be more logical to make Untyped = Nothing.
+ // However, this interacts in a bad way with Scala's current type inference.
+ // In fact, we cannot write something like Select(pre, name), where pre is
+ // of type Tree[Nothing]; type inference will treat the Nothing as an uninstantiated
+ // value and will not infer Nothing as the type parameter for Select.
+ // We should come back to this issue once type inference is changed.
+ type Untyped = Null
+
+ /** The total number of created tree nodes, maintained if Stats.enabled */
+ @sharable var ntrees = 0
+
+ /** Property key for trees with documentation strings attached */
+ val DocComment = new Property.Key[Comment]
+
+ @sharable private var nextId = 0 // for debugging
+
+ type LazyTree = AnyRef /* really: Tree | Lazy[Tree] */
+ type LazyTreeList = AnyRef /* really: List[Tree] | Lazy[List[Tree]] */
+
+ /** Trees take a parameter indicating what the type of their `tpe` field
+ * is. Two choices: `Type` or `Untyped`.
+ * Untyped trees have type `Tree[Untyped]`.
+ *
+ * Tree typing uses a copy-on-write implementation:
+ *
+ * - You can never observe a `tpe` which is `null` (throws an exception)
+ * - So when creating a typed tree with `withType` we can re-use
+ * the existing tree transparently, assigning its `tpe` field,
+ * provided it was `null` before.
+ * - It is impossible to embed untyped trees in typed ones.
+ * - Typed trees can be embedded in untyped ones provided they are rooted
+ * in a TypedSplice node.
+ * - Type checking an untyped tree should remove all embedded `TypedSplice`
+ * nodes.
+ */
+ abstract class Tree[-T >: Untyped] extends Positioned
+ with Product
+ with Attachment.Container
+ with printing.Showable
+ with Cloneable {
+
+ if (Stats.enabled) ntrees += 1
+
+ private def nxId = {
+ nextId += 1
+ //assert(nextId != 199, this)
+ nextId
+ }
+
+ /** A unique identifier for this tree. Used for debugging, and potentially
+ * tracking presentation compiler interactions
+ */
+ private var myUniqueId: Int = nxId
+
+ def uniqueId = myUniqueId
+
+ /** The type constructor at the root of the tree */
+ type ThisTree[T >: Untyped] <: Tree[T]
+
+ private[this] var myTpe: T = _
+
+ /** Destructively set the type of the tree. This should be called only when it is known that
+ * it is safe under sharing to do so. One use-case is in the withType method below
+ * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer,
+ * where we overwrite with a simplified version of the type itself.
+ */
+ private[dotc] def overwriteType(tpe: T) = {
+ if (this.isInstanceOf[Template[_]]) assert(tpe.isInstanceOf[WithFixedSym], s"$this <--- $tpe")
+ myTpe = tpe
+ }
+
+ /** The type of the tree. In case of an untyped tree,
+ * an UnAssignedTypeException is thrown. (Overridden by empty trees)
+ */
+ def tpe: T @uncheckedVariance = {
+ if (myTpe == null)
+ throw new UnAssignedTypeException(this)
+ myTpe
+ }
+
+ /** Copy `tpe` attribute from tree `from` into this tree, independently
+ * whether it is null or not.
+ final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = {
+ val t1 = this.withPos(from.pos)
+ val t2 =
+ if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type])
+ else t1
+ t2.asInstanceOf[ThisTree[T]]
+ }
+ */
+
+ /** Return a typed tree that's isomorphic to this tree, but has given
+ * type. (Overridden by empty trees)
+ */
+ def withType(tpe: Type)(implicit ctx: Context): ThisTree[Type] = {
+ if (tpe == ErrorType) assert(ctx.reporter.errorsReported)
+ withTypeUnchecked(tpe)
+ }
+
+ def withTypeUnchecked(tpe: Type): ThisTree[Type] = {
+ val tree =
+ (if (myTpe == null ||
+ (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this
+ else clone).asInstanceOf[Tree[Type]]
+ tree overwriteType tpe
+ tree.asInstanceOf[ThisTree[Type]]
+ }
+
+ /** Does the tree have its type field set? Note: this operation is not
+ * referentially transparent, because it can observe the withType
+ * modifications. Should be used only in special circumstances (we
+ * need it for printing trees with optional type info).
+ */
+ final def hasType: Boolean = myTpe != null
+
+ final def typeOpt: Type = myTpe match {
+ case tp: Type => tp
+ case _ => NoType
+ }
+
+ /** The denotation referred to by this tree.
+ * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other
+ * kinds of trees
+ */
+ def denot(implicit ctx: Context): Denotation = NoDenotation
+
+ /** Shorthand for `denot.symbol`. */
+ final def symbol(implicit ctx: Context): Symbol = denot.symbol
+
+ /** Does this tree represent a type? */
+ def isType: Boolean = false
+
+ /** Does this tree represent a term? */
+ def isTerm: Boolean = false
+
+ /** Is this a legal part of a pattern which is not at the same time a term? */
+ def isPattern: Boolean = false
+
+ /** Does this tree define a new symbol that is not defined elsewhere? */
+ def isDef: Boolean = false
+
+ /** Is this tree either the empty tree or the empty ValDef or an empty type ident? */
+ def isEmpty: Boolean = false
+
+ /** Convert tree to a list. Gives a singleton list, except
+ * for thickets which return their element trees.
+ */
+ def toList: List[Tree[T]] = this :: Nil
+
+ /** if this tree is the empty tree, the alternative, else this tree */
+ def orElse[U >: Untyped <: T](that: => Tree[U]): Tree[U] =
+ if (this eq genericEmptyTree) that else this
+
+ /** The number of nodes in this tree */
+ def treeSize: Int = {
+ var s = 1
+ def addSize(elem: Any): Unit = elem match {
+ case t: Tree[_] => s += t.treeSize
+ case ts: List[_] => ts foreach addSize
+ case _ =>
+ }
+ productIterator foreach addSize
+ s
+ }
+
+ /** If this is a thicket, perform `op` on each of its trees
+ * otherwise, perform `op` on the tree itself.
+ */
+ def foreachInThicket(op: Tree[T] => Unit): Unit = op(this)
+
+ override def toText(printer: Printer) = printer.toText(this)
+
+ override def hashCode(): Int = uniqueId // for debugging; was: System.identityHashCode(this)
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ override def clone: Tree[T] = {
+ val tree = super.clone.asInstanceOf[Tree[T]]
+ tree.myUniqueId = nxId
+ tree
+ }
+ }
+
+ class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException {
+ override def getMessage: String = s"type of $tree is not assigned"
+ }
+
+ // ------ Categories of trees -----------------------------------
+
+ /** Instances of this class are trees for which isType is definitely true.
+ * Note that some trees have isType = true without being TypTrees (e.g. Ident, AnnotatedTree)
+ */
+ trait TypTree[-T >: Untyped] extends Tree[T] {
+ type ThisTree[-T >: Untyped] <: TypTree[T]
+ override def isType = true
+ }
+
+ /** Instances of this class are trees for which isTerm is definitely true.
+ * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, AnnotatedTree)
+ */
+ trait TermTree[-T >: Untyped] extends Tree[T] {
+ type ThisTree[-T >: Untyped] <: TermTree[T]
+ override def isTerm = true
+ }
+
+ /** Instances of this class are trees which are not terms but are legal
+ * parts of patterns.
+ */
+ trait PatternTree[-T >: Untyped] extends Tree[T] {
+ type ThisTree[-T >: Untyped] <: PatternTree[T]
+ override def isPattern = true
+ }
+
+ /** Tree's denotation can be derived from its type */
+ abstract class DenotingTree[-T >: Untyped] extends Tree[T] {
+ type ThisTree[-T >: Untyped] <: DenotingTree[T]
+ override def denot(implicit ctx: Context) = tpe match {
+ case tpe: NamedType => tpe.denot
+ case tpe: ThisType => tpe.cls.denot
+ case tpe: AnnotatedType => tpe.stripAnnots match {
+ case tpe: NamedType => tpe.denot
+ case tpe: ThisType => tpe.cls.denot
+ case _ => NoDenotation
+ }
+ case _ => NoDenotation
+ }
+ }
+
+ /** Tree's denot/isType/isTerm properties come from a subtree
+ * identified by `forwardTo`.
+ */
+ abstract class ProxyTree[-T >: Untyped] extends Tree[T] {
+ type ThisTree[-T >: Untyped] <: ProxyTree[T]
+ def forwardTo: Tree[T]
+ override def denot(implicit ctx: Context): Denotation = forwardTo.denot
+ override def isTerm = forwardTo.isTerm
+ override def isType = forwardTo.isType
+ }
+
+ /** Tree has a name */
+ abstract class NameTree[-T >: Untyped] extends DenotingTree[T] {
+ type ThisTree[-T >: Untyped] <: NameTree[T]
+ def name: Name
+ }
+
+ /** Tree refers by name to a denotation */
+ abstract class RefTree[-T >: Untyped] extends NameTree[T] {
+ type ThisTree[-T >: Untyped] <: RefTree[T]
+ def qualifier: Tree[T]
+ override def isType = name.isTypeName
+ override def isTerm = name.isTermName
+ }
+
+ /** Tree defines a new symbol */
+ trait DefTree[-T >: Untyped] extends DenotingTree[T] {
+ type ThisTree[-T >: Untyped] <: DefTree[T]
+ override def isDef = true
+ def namedType = tpe.asInstanceOf[NamedType]
+ }
+
+ /** Tree defines a new symbol and carries modifiers.
+ * The position of a MemberDef contains only the defined identifier or pattern.
+ * The envelope of a MemberDef contains the whole definition and has its point
+ * on the opening keyword (or the next token after that if keyword is missing).
+ */
+ abstract class MemberDef[-T >: Untyped] extends NameTree[T] with DefTree[T] {
+ type ThisTree[-T >: Untyped] <: MemberDef[T]
+
+ // Modifiers are stored lazily; `null` encodes "no modifiers set yet".
+ private[this] var myMods: untpd.Modifiers = null
+
+ /** The modifiers set so far, or empty modifiers if none were set. */
+ private[dotc] def rawMods: untpd.Modifiers =
+ if (myMods == null) untpd.EmptyModifiers else myMods
+
+ /** The doc comment attached to this definition, if any. */
+ def rawComment: Option[Comment] = getAttachment(DocComment)
+
+ /** This definition with modifiers `mods`. Copies the tree (copy-on-write)
+ * only when the modifiers actually change; otherwise mutates in place.
+ */
+ def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = {
+ val tree = if (myMods == null || (myMods == mods)) this else clone.asInstanceOf[MemberDef[Untyped]]
+ tree.setMods(mods)
+ tree.asInstanceOf[ThisTree[Untyped]]
+ }
+
+ /** This definition with `flags` as its only modifiers. */
+ def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags))
+
+ /** Attach `comment` as the doc comment of this definition, if present. */
+ def setComment(comment: Option[Comment]): ThisTree[Untyped] = {
+ comment.map(putAttachment(DocComment, _))
+ asInstanceOf[ThisTree[Untyped]]
+ }
+
+ protected def setMods(mods: untpd.Modifiers) = myMods = mods
+
+ /** The position of the name defined by this definition.
+ * This is a point position if the definition is synthetic, or a range position
+ * if the definition comes from source.
+ * It might also be that the definition does not have a position (for instance when synthesized by
+ * a calling chain from `viewExists`), in that case the return position is NoPosition.
+ */
+ def namePos =
+ if (pos.exists)
+ if (rawMods.is(Synthetic)) Position(pos.point, pos.point)
+ else Position(pos.point, pos.point + name.length, pos.point)
+ else pos
+ }
+
+ /** A ValDef or DefDef tree */
+ trait ValOrDefDef[-T >: Untyped] extends MemberDef[T] with WithLazyField[Tree[T]] {
+ def tpt: Tree[T]
+ def unforcedRhs: LazyTree = unforced
+ def rhs(implicit ctx: Context): Tree[T] = forceIfLazy
+ }
+
+ // ----------- Tree case classes ------------------------------------
+
+ /** name */
+ case class Ident[-T >: Untyped] private[ast] (name: Name)
+ extends RefTree[T] {
+ type ThisTree[-T >: Untyped] = Ident[T]
+ def qualifier: Tree[T] = genericEmptyTree
+ }
+
+ class BackquotedIdent[-T >: Untyped] private[ast] (name: Name)
+ extends Ident[T](name) {
+ override def toString = s"BackquotedIdent($name)"
+ }
+
+ /** qualifier.name, or qualifier#name, if qualifier is a type */
+ case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)
+ extends RefTree[T] {
+ type ThisTree[-T >: Untyped] = Select[T]
+ }
+
+ class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)
+ extends Select[T](qualifier, name) {
+ override def toString = s"SelectWithSig($qualifier, $name, $sig)"
+ }
+
+ /** qual.this */
+ case class This[-T >: Untyped] private[ast] (qual: untpd.Ident)
+ extends DenotingTree[T] with TermTree[T] {
+ type ThisTree[-T >: Untyped] = This[T]
+ // Denotation of a This tree is always the underlying class; needs correction for modules.
+ override def denot(implicit ctx: Context): Denotation = {
+ tpe match {
+ // A module's `this` has a TermRef type; redirect the denotation to the
+ // module class, viewed from the reference's prefix.
+ case tpe @ TermRef(pre, _) if tpe.symbol is Module =>
+ tpe.symbol.moduleClass.denot.asSeenFrom(pre)
+ case _ =>
+ super.denot
+ }
+ }
+ }
+
+ /** C.super[mix], where qual = C.this */
+ case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)
+ extends ProxyTree[T] with TermTree[T] {
+ type ThisTree[-T >: Untyped] = Super[T]
+ def forwardTo = qual
+ }
+
+ abstract class GenericApply[-T >: Untyped] extends ProxyTree[T] with TermTree[T] {
+ type ThisTree[-T >: Untyped] <: GenericApply[T]
+ val fun: Tree[T]
+ val args: List[Tree[T]]
+ def forwardTo = fun
+ }
+
+ /** fun(args) */
+ case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])
+ extends GenericApply[T] {
+ type ThisTree[-T >: Untyped] = Apply[T]
+ }
+
+ /** fun[args] */
+ case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])
+ extends GenericApply[T] {
+ type ThisTree[-T >: Untyped] = TypeApply[T]
+ }
+
+ /** const */
+ case class Literal[-T >: Untyped] private[ast] (const: Constant)
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Literal[T]
+ }
+
+ /** new tpt, but no constructor call */
+ case class New[-T >: Untyped] private[ast] (tpt: Tree[T])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = New[T]
+ }
+
+ /** expr : tpt */
+ case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])
+ extends ProxyTree[T] with TermTree[T] {
+ type ThisTree[-T >: Untyped] = Typed[T]
+ def forwardTo = expr
+ }
+
+ /** name = arg, in a parameter list */
+ case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])
+ extends Tree[T] {
+ type ThisTree[-T >: Untyped] = NamedArg[T]
+ }
+
+ /** name = arg, outside a parameter list */
+ case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Assign[T]
+ }
+
+ /** { stats; expr } */
+ case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Block[T]
+ }
+
+ /** if cond then thenp else elsep */
+ case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = If[T]
+ }
+
+ /** A closure with an environment and a reference to a method.
+ * @param env The captured parameters of the closure
+ * @param meth A ref tree that refers to the method of the closure.
+ * The first (env.length) parameters of that method are filled
+ * with env values.
+ * @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type
+ * of the closure is a function type, otherwise it is the type
+ * given in `tpt`, which must be a SAM type.
+ */
+ case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Closure[T]
+ }
+
+ /** selector match { cases } */
+ case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Match[T]
+ }
+
+ /** case pat if guard => body; only appears as child of a Match */
+ case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])
+ extends Tree[T] {
+ type ThisTree[-T >: Untyped] = CaseDef[T]
+ }
+
+ /** return expr
+ * where `from` refers to the method from which the return takes place
+ * After program transformations this is not necessarily the enclosing method, because
+ * closures can intervene.
+ */
+ case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Return[T]
+ }
+
+ /** try block catch handler finally finalizer
+ *
+ * Note: if the handler is a case block CASES of the form
+ *
+ * { case1 ... caseN }
+ *
+ * the parser returns Match(EmptyTree, CASES). Desugaring and typing this yields a closure
+ * node
+ *
+ * { def $anonfun(x: Throwable) = x match CASES; Closure(Nil, $anonfun) }
+ *
+ * At some later stage when we normalize the try we can revert this to
+ *
+ * Match(EmptyTree, CASES)
+ *
+ * or else if stack is non-empty
+ *
+ * Match(EmptyTree, <case x: Throwable => $anonfun(x)>)
+ */
+ case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])
+ extends TermTree[T] {
+ type ThisTree[-T >: Untyped] = Try[T]
+ }
+
+ /** Seq(elems)
+ * @param elemtpt The element type of the sequence.
+ */
+ case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])
+ extends Tree[T] {
+ type ThisTree[-T >: Untyped] = SeqLiteral[T]
+ }
+
+ /** Array(elems) */
+ class JavaSeqLiteral[T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])
+ extends SeqLiteral(elems, elemtpt) {
+ override def toString = s"JavaSeqLiteral($elems, $elemtpt)"
+ }
+
+ /** A tree representing inlined code.
+ *
+ * @param call Info about the original call that was inlined
+ * Until PostTyper, this is the full call, afterwards only
+ * a reference to the toplevel class from which the call was inlined.
+ * @param bindings Bindings for proxies to be used in the inlined code
+ * @param expansion The inlined tree, minus bindings.
+ *
+ * The full inlined code is equivalent to
+ *
+ * { bindings; expansion }
+ *
+ * The reason to keep `bindings` separate is because they are typed in a
+ * different context: `bindings` represent the arguments to the inlined
+ * call, whereas `expansion` represents the body of the inlined function.
+ */
+ case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])
+ extends Tree[T] {
+ type ThisTree[-T >: Untyped] = Inlined[T]
+ }
+
+ /** A type tree that represents an existing or inferred type */
+ case class TypeTree[-T >: Untyped] ()
+ extends DenotingTree[T] with TypTree[T] {
+ type ThisTree[-T >: Untyped] = TypeTree[T]
+ override def isEmpty = !hasType
+ override def toString =
+ s"TypeTree${if (hasType) s"[$typeOpt]" else ""}"
+ }
+
+ /** ref.type */
+ case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])
+ extends DenotingTree[T] with TypTree[T] {
+ type ThisTree[-T >: Untyped] = SingletonTypeTree[T]
+ }
+
+ /** left & right */
+ case class AndTypeTree[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = AndTypeTree[T]
+ }
+
+ /** left | right */
+ case class OrTypeTree[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = OrTypeTree[T]
+ }
+
+ /** tpt { refinements } */
+ case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])
+ extends ProxyTree[T] with TypTree[T] {
+ type ThisTree[-T >: Untyped] = RefinedTypeTree[T]
+ def forwardTo = tpt
+ }
+
+ /** tpt[args] */
+ case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])
+ extends ProxyTree[T] with TypTree[T] {
+ type ThisTree[-T >: Untyped] = AppliedTypeTree[T]
+ def forwardTo = tpt
+ }
+
+ /** [typeparams] -> tpt */
+ case class PolyTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = PolyTypeTree[T]
+ }
+
+ /** => T */
+ case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = ByNameTypeTree[T]
+ }
+
+ /** >: lo <: hi */
+ case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = TypeBoundsTree[T]
+ }
+
+ /** name @ body */
+ case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])
+ extends NameTree[T] with DefTree[T] with PatternTree[T] {
+ type ThisTree[-T >: Untyped] = Bind[T]
+ override def isType = name.isTypeName
+ override def isTerm = name.isTermName
+ }
+
+ /** tree_1 | ... | tree_n */
+ case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])
+ extends PatternTree[T] {
+ type ThisTree[-T >: Untyped] = Alternative[T]
+ }
+
+ /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following
+ * components:
+ *
+ * @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`)
+ * possibly with type parameters
+ * @param implicits Any implicit parameters passed to the unapply after the selector
+ * @param patterns The argument patterns in the pattern match.
+ *
+ * It is typed with same type as first `fun` argument
+ * Given a match selector `sel` a pattern UnApply(fun, implicits, patterns) is roughly translated as follows
+ *
+ * val result = fun(sel)(implicits)
+ * if (result.isDefined) "match patterns against result"
+ */
+ case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])
+ extends PatternTree[T] {
+ type ThisTree[-T >: Untyped] = UnApply[T]
+ }
+
+ /** mods val name: tpt = rhs */
+ case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree)
+ extends ValOrDefDef[T] {
+ type ThisTree[-T >: Untyped] = ValDef[T]
+ assert(isEmpty || tpt != genericEmptyTree)
+ def unforced = preRhs
+ protected def force(x: AnyRef) = preRhs = x
+ }
+
+ /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */
+ case class DefDef[-T >: Untyped] private[ast] (name: TermName, tparams: List[TypeDef[T]],
+ vparamss: List[List[ValDef[T]]], tpt: Tree[T], private var preRhs: LazyTree)
+ extends ValOrDefDef[T] {
+ type ThisTree[-T >: Untyped] = DefDef[T]
+ assert(tpt != genericEmptyTree)
+ def unforced = preRhs
+ protected def force(x: AnyRef) = preRhs = x
+ }
+
+ /** mods class name template or
+ * mods trait name template or
+ * mods type name = rhs or
+ * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) & (lo ne hi)
+ */
+ case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])
+ extends MemberDef[T] {
+ type ThisTree[-T >: Untyped] = TypeDef[T]
+
+ /** Is this a definition of a class? */
+ def isClassDef = rhs.isInstanceOf[Template[_]]
+ }
+
+ /** extends parents { self => body } */
+ case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parents: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList)
+ extends DefTree[T] with WithLazyField[List[Tree[T]]] {
+ type ThisTree[-T >: Untyped] = Template[T]
+ def unforcedBody = unforced
+ def unforced = preBody
+ protected def force(x: AnyRef) = preBody = x
+ def body(implicit ctx: Context): List[Tree[T]] = forceIfLazy
+ }
+
+ /** import expr.selectors
+ * where a selector is either an untyped `Ident`, `name` or
+ * an untyped thicket consisting of `name` and `rename`.
+ */
+ case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[Tree[Untyped]])
+ extends DenotingTree[T] {
+ type ThisTree[-T >: Untyped] = Import[T]
+ }
+
+ /** package pid { stats } */
+ case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])
+ extends ProxyTree[T] {
+ type ThisTree[-T >: Untyped] = PackageDef[T]
+ def forwardTo = pid
+ }
+
+ /** arg @annot */
+ case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])
+ extends ProxyTree[T] {
+ type ThisTree[-T >: Untyped] = Annotated[T]
+ def forwardTo = arg
+ }
+
+ // Mixin for trees that carry neither a meaningful type nor a position:
+ // `tpe` is pinned to NoType, `withTypeUnchecked` is the identity, and
+ // `setPos` is a no-op. This makes such trees safe to share as singletons.
+ trait WithoutTypeOrPos[-T >: Untyped] extends Tree[T] {
+ override def tpe: T @uncheckedVariance = NoType.asInstanceOf[T]
+ override def withTypeUnchecked(tpe: Type) = this.asInstanceOf[ThisTree[Type]]
+ override def pos = NoPosition
+ override def setPos(pos: Position) = {}
+ }
+
+ /** Temporary class that results from translation of ModuleDefs
+ * (and possibly other statements).
+ * The contained trees will be integrated when transformed with
+ * a `transform(List[Tree])` call.
+ */
+ case class Thicket[-T >: Untyped](trees: List[Tree[T]])
+ extends Tree[T] with WithoutTypeOrPos[T] {
+   type ThisTree[-T >: Untyped] = Thicket[T]
+   // Empty iff no trees are wrapped; the empty thicket doubles as EmptyTree.
+   override def isEmpty: Boolean = trees.isEmpty
+   // Viewed as a list, a thicket is its flattened contents.
+   override def toList: List[Tree[T]] = flatten(trees)
+   override def toString =
+     if (isEmpty) "EmptyTree"
+     else "Thicket(" + trees.mkString(", ") + ")"
+   // Setting a position distributes it over every wrapped tree.
+   override def withPos(pos: Position): this.type = {
+     val repositioned = trees.map(_.withPos(pos))
+     new Thicket[T](repositioned).asInstanceOf[this.type]
+   }
+   // The position of a thicket is the union of its trees' positions.
+   override def pos = trees.foldLeft(NoPosition)((p, t) => p union t.pos)
+   override def foreachInThicket(op: Tree[T] => Unit): Unit =
+     trees.foreach(_.foreachInThicket(op))
+ }
+
+ // Shared singleton standing in for "no ValDef": wildcard name, empty tpt and
+ // rhs, no type or position (via WithoutTypeOrPos), and PrivateLocal modifiers.
+ class EmptyValDef[T >: Untyped] extends ValDef[T](
+ nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T]) with WithoutTypeOrPos[T] {
+ override def isEmpty: Boolean = true
+ setMods(untpd.Modifiers(PrivateLocal))
+ }
+
+ // Canonical shared empty instances. They carry no type or position, so a
+ // single object can be reused across compiler runs (@sharable) and re-cast
+ // to any instantiation of T by the generic accessors below.
+ @sharable val theEmptyTree: Thicket[Type] = Thicket(Nil)
+ @sharable val theEmptyValDef = new EmptyValDef[Type]
+
+ def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]]
+ def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]]
+
+ /** Replaces every top-level Thicket in `trees` by its elements.
+ * Copy-on-write: returns `trees` itself (the same list reference) when it
+ * contains no Thicket; the buffer is allocated only when the first Thicket
+ * is encountered, at which point the already-scanned prefix is back-filled.
+ */
+ def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = {
+ var buf: ListBuffer[Tree[T]] = null
+ var xs = trees
+ while (xs.nonEmpty) {
+ xs.head match {
+ case Thicket(elems) =>
+ if (buf == null) {
+ buf = new ListBuffer
+ // Copy the prefix that preceded this first Thicket into the buffer.
+ var ys = trees
+ while (ys ne xs) {
+ buf += ys.head
+ ys = ys.tail
+ }
+ }
+ for (elem <- elems) {
+ // Thicket elements are expected to be flat already (no nesting).
+ assert(!elem.isInstanceOf[Thicket[_]])
+ buf += elem
+ }
+ case tree =>
+ if (buf != null) buf += tree
+ }
+ xs = xs.tail
+ }
+ if (buf != null) buf.toList else trees
+ }
+
+ // ----- Lazy trees and tree sequences
+
+ /** A tree that can have a lazy field
+ * The field is represented by some private `var` which is
+ * proxied `unforced` and `force`. Forcing the field will
+ * set the `var` to the underlying value.
+ */
+ trait WithLazyField[+T <: AnyRef] {
+ def unforced: AnyRef
+ protected def force(x: AnyRef): Unit
+ def forceIfLazy(implicit ctx: Context): T = unforced match {
+ case lzy: Lazy[T] =>
+ // Run the delayed computation once and cache the result in the field.
+ val x = lzy.complete
+ force(x)
+ x
+ // Erasure makes this match unchecked: any non-Lazy value is taken to be
+ // the already-forced field value.
+ case x: T @ unchecked => x
+ }
+ }
+
+ /** A base trait for lazy tree fields.
+ * These can be instantiated with Lazy instances which
+ * can delay tree construction until the field is first demanded.
+ */
+ trait Lazy[T <: AnyRef] {
+ // Produces the delayed value; WithLazyField.forceIfLazy caches the result
+ // after the first call.
+ def complete(implicit ctx: Context): T
+ }
+
+ // ----- Generic Tree Instances, inherited by `tpd` and `untpd`.
+
+ abstract class Instance[T >: Untyped <: Type] extends DotClass { inst =>
+
+ // Aliases fixing the type parameter, so `tpd.Tree` / `untpd.Tree` etc.
+ // can be written without repeating the instantiation of T.
+ type Tree = Trees.Tree[T]
+ type TypTree = Trees.TypTree[T]
+ type TermTree = Trees.TermTree[T]
+ type PatternTree = Trees.PatternTree[T]
+ type DenotingTree = Trees.DenotingTree[T]
+ type ProxyTree = Trees.ProxyTree[T]
+ type NameTree = Trees.NameTree[T]
+ type RefTree = Trees.RefTree[T]
+ type DefTree = Trees.DefTree[T]
+ type MemberDef = Trees.MemberDef[T]
+ type ValOrDefDef = Trees.ValOrDefDef[T]
+
+ type Ident = Trees.Ident[T]
+ type BackquotedIdent = Trees.BackquotedIdent[T]
+ type Select = Trees.Select[T]
+ type SelectWithSig = Trees.SelectWithSig[T]
+ type This = Trees.This[T]
+ type Super = Trees.Super[T]
+ type Apply = Trees.Apply[T]
+ type TypeApply = Trees.TypeApply[T]
+ type Literal = Trees.Literal[T]
+ type New = Trees.New[T]
+ type Typed = Trees.Typed[T]
+ type NamedArg = Trees.NamedArg[T]
+ type Assign = Trees.Assign[T]
+ type Block = Trees.Block[T]
+ type If = Trees.If[T]
+ type Closure = Trees.Closure[T]
+ type Match = Trees.Match[T]
+ type CaseDef = Trees.CaseDef[T]
+ type Return = Trees.Return[T]
+ type Try = Trees.Try[T]
+ type SeqLiteral = Trees.SeqLiteral[T]
+ type JavaSeqLiteral = Trees.JavaSeqLiteral[T]
+ type Inlined = Trees.Inlined[T]
+ type TypeTree = Trees.TypeTree[T]
+ type SingletonTypeTree = Trees.SingletonTypeTree[T]
+ type AndTypeTree = Trees.AndTypeTree[T]
+ type OrTypeTree = Trees.OrTypeTree[T]
+ type RefinedTypeTree = Trees.RefinedTypeTree[T]
+ type AppliedTypeTree = Trees.AppliedTypeTree[T]
+ type PolyTypeTree = Trees.PolyTypeTree[T]
+ type ByNameTypeTree = Trees.ByNameTypeTree[T]
+ type TypeBoundsTree = Trees.TypeBoundsTree[T]
+ type Bind = Trees.Bind[T]
+ type Alternative = Trees.Alternative[T]
+ type UnApply = Trees.UnApply[T]
+ type ValDef = Trees.ValDef[T]
+ type DefDef = Trees.DefDef[T]
+ type TypeDef = Trees.TypeDef[T]
+ type Template = Trees.Template[T]
+ type Import = Trees.Import[T]
+ type PackageDef = Trees.PackageDef[T]
+ type Annotated = Trees.Annotated[T]
+ type Thicket = Trees.Thicket[T]
+
+ // Instance-typed views of the shared empty singletons.
+ @sharable val EmptyTree: Thicket = genericEmptyTree
+ @sharable val EmptyValDef: ValDef = genericEmptyValDef
+
+ // ----- Auxiliary creation methods ------------------
+
+ def Thicket(trees: List[Tree]): Thicket = new Thicket(trees)
+ def Thicket(): Thicket = EmptyTree
+ def Thicket(x1: Tree, x2: Tree): Thicket = Thicket(x1 :: x2 :: Nil)
+ def Thicket(x1: Tree, x2: Tree, x3: Tree): Thicket = Thicket(x1 :: x2 :: x3 :: Nil)
+ // Flattens `xs` and avoids wrapping a single resulting tree in a Thicket.
+ def flatTree(xs: List[Tree]): Tree = flatten(xs) match {
+ case x :: Nil => x
+ case ys => Thicket(ys)
+ }
+
+ // ----- Helper classes for copying, transforming, accumulating -----------------
+
+ // Concrete instances supply their own copier (see TypedTreeCopier note below).
+ val cpy: TreeCopier
+
+ /** A class for copying trees. The copy methods avoid creating a new tree
+  * if all arguments stay the same.
+  *
+  * Note: Some of the copy methods take a context.
+  * These are exactly those methods that are overridden in TypedTreeCopier
+  * so that they selectively retype themselves. Retyping needs a context.
+  */
+ abstract class TreeCopier {
+
+ // Hooks implemented by subclasses; `finalize` funnels every copy through them
+ // after transferring the original tree's position.
+ def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T]
+ def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T]
+
+ def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] =
+ postProcess(tree, copied withPos tree.pos)
+
+ def finalize(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] =
+ postProcess(tree, copied withPos tree.pos)
+
+ // Note the case ordering below: subclasses (BackquotedIdent, SelectWithSig,
+ // JavaSeqLiteral) are matched before their parent class so copies preserve
+ // the exact node class and its extra payload (e.g. the signature).
+ def Ident(tree: Tree)(name: Name): Ident = tree match {
+ case tree: BackquotedIdent =>
+ if (name == tree.name) tree
+ else finalize(tree, new BackquotedIdent(name))
+ case tree: Ident if name == tree.name => tree
+ case _ => finalize(tree, untpd.Ident(name))
+ }
+ def Select(tree: Tree)(qualifier: Tree, name: Name)(implicit ctx: Context): Select = tree match {
+ case tree: SelectWithSig =>
+ if ((qualifier eq tree.qualifier) && (name == tree.name)) tree
+ else finalize(tree, new SelectWithSig(qualifier, name, tree.sig))
+ case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree
+ case _ => finalize(tree, untpd.Select(qualifier, name))
+ }
+ def This(tree: Tree)(qual: untpd.Ident): This = tree match {
+ case tree: This if qual eq tree.qual => tree
+ case _ => finalize(tree, untpd.This(qual))
+ }
+ def Super(tree: Tree)(qual: Tree, mix: untpd.Ident): Super = tree match {
+ case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree
+ case _ => finalize(tree, untpd.Super(qual, mix))
+ }
+ def Apply(tree: Tree)(fun: Tree, args: List[Tree])(implicit ctx: Context): Apply = tree match {
+ case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree
+ case _ => finalize(tree, untpd.Apply(fun, args))
+ }
+ def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(implicit ctx: Context): TypeApply = tree match {
+ case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree
+ case _ => finalize(tree, untpd.TypeApply(fun, args))
+ }
+ def Literal(tree: Tree)(const: Constant)(implicit ctx: Context): Literal = tree match {
+ case tree: Literal if const == tree.const => tree
+ case _ => finalize(tree, untpd.Literal(const))
+ }
+ def New(tree: Tree)(tpt: Tree)(implicit ctx: Context): New = tree match {
+ case tree: New if tpt eq tree.tpt => tree
+ case _ => finalize(tree, untpd.New(tpt))
+ }
+ def Typed(tree: Tree)(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed = tree match {
+ case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree
+ case _ => finalize(tree, untpd.Typed(expr, tpt))
+ }
+ def NamedArg(tree: Tree)(name: Name, arg: Tree)(implicit ctx: Context): NamedArg = tree match {
+ case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree
+ case _ => finalize(tree, untpd.NamedArg(name, arg))
+ }
+ def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(implicit ctx: Context): Assign = tree match {
+ case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree
+ case _ => finalize(tree, untpd.Assign(lhs, rhs))
+ }
+ def Block(tree: Tree)(stats: List[Tree], expr: Tree)(implicit ctx: Context): Block = tree match {
+ case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => tree
+ case _ => finalize(tree, untpd.Block(stats, expr))
+ }
+ def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(implicit ctx: Context): If = tree match {
+ case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree
+ case _ => finalize(tree, untpd.If(cond, thenp, elsep))
+ }
+ def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(implicit ctx: Context): Closure = tree match {
+ case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree
+ case _ => finalize(tree, untpd.Closure(env, meth, tpt))
+ }
+ def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = tree match {
+ case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree
+ case _ => finalize(tree, untpd.Match(selector, cases))
+ }
+ def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(implicit ctx: Context): CaseDef = tree match {
+ case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree
+ case _ => finalize(tree, untpd.CaseDef(pat, guard, body))
+ }
+ def Return(tree: Tree)(expr: Tree, from: Tree)(implicit ctx: Context): Return = tree match {
+ case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree
+ case _ => finalize(tree, untpd.Return(expr, from))
+ }
+ def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit ctx: Context): Try = tree match {
+ case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree
+ case _ => finalize(tree, untpd.Try(expr, cases, finalizer))
+ }
+ def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): SeqLiteral = tree match {
+ case tree: JavaSeqLiteral =>
+ if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree
+ else finalize(tree, new JavaSeqLiteral(elems, elemtpt))
+ case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree
+ case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt))
+ }
+ def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit ctx: Context): Inlined = tree match {
+ case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree
+ case _ => finalize(tree, untpd.Inlined(call, bindings, expansion))
+ }
+ def SingletonTypeTree(tree: Tree)(ref: Tree): SingletonTypeTree = tree match {
+ case tree: SingletonTypeTree if ref eq tree.ref => tree
+ case _ => finalize(tree, untpd.SingletonTypeTree(ref))
+ }
+ def AndTypeTree(tree: Tree)(left: Tree, right: Tree): AndTypeTree = tree match {
+ case tree: AndTypeTree if (left eq tree.left) && (right eq tree.right) => tree
+ case _ => finalize(tree, untpd.AndTypeTree(left, right))
+ }
+ def OrTypeTree(tree: Tree)(left: Tree, right: Tree): OrTypeTree = tree match {
+ case tree: OrTypeTree if (left eq tree.left) && (right eq tree.right) => tree
+ case _ => finalize(tree, untpd.OrTypeTree(left, right))
+ }
+ def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = tree match {
+ case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree
+ case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements))
+ }
+ def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree]): AppliedTypeTree = tree match {
+ case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree
+ case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args))
+ }
+ def PolyTypeTree(tree: Tree)(tparams: List[TypeDef], body: Tree): PolyTypeTree = tree match {
+ case tree: PolyTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree
+ case _ => finalize(tree, untpd.PolyTypeTree(tparams, body))
+ }
+ def ByNameTypeTree(tree: Tree)(result: Tree): ByNameTypeTree = tree match {
+ case tree: ByNameTypeTree if result eq tree.result => tree
+ case _ => finalize(tree, untpd.ByNameTypeTree(result))
+ }
+ def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree): TypeBoundsTree = tree match {
+ case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) => tree
+ case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi))
+ }
+ def Bind(tree: Tree)(name: Name, body: Tree): Bind = tree match {
+ case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree
+ case _ => finalize(tree, untpd.Bind(name, body))
+ }
+ def Alternative(tree: Tree)(trees: List[Tree]): Alternative = tree match {
+ case tree: Alternative if trees eq tree.trees => tree
+ case _ => finalize(tree, untpd.Alternative(trees))
+ }
+ def UnApply(tree: Tree)(fun: Tree, implicits: List[Tree], patterns: List[Tree]): UnApply = tree match {
+ case tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree
+ case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns))
+ }
+ // The lazy-field trees (ValDef/DefDef/Template) compare against the UNFORCED
+ // rhs/body so copying never forces a lazy field.
+ def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree): ValDef = tree match {
+ case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree
+ case _ => finalize(tree, untpd.ValDef(name, tpt, rhs))
+ }
+ def DefDef(tree: Tree)(name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: LazyTree): DefDef = tree match {
+ case tree: DefDef if (name == tree.name) && (tparams eq tree.tparams) && (vparamss eq tree.vparamss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree
+ case _ => finalize(tree, untpd.DefDef(name, tparams, vparamss, tpt, rhs))
+ }
+ def TypeDef(tree: Tree)(name: TypeName, rhs: Tree): TypeDef = tree match {
+ case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree
+ case _ => finalize(tree, untpd.TypeDef(name, rhs))
+ }
+ def Template(tree: Tree)(constr: DefDef, parents: List[Tree], self: ValDef, body: LazyTreeList): Template = tree match {
+ case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (self eq tree.self) && (body eq tree.unforcedBody) => tree
+ case _ => finalize(tree, untpd.Template(constr, parents, self, body))
+ }
+ def Import(tree: Tree)(expr: Tree, selectors: List[untpd.Tree]): Import = tree match {
+ case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree
+ case _ => finalize(tree, untpd.Import(expr, selectors))
+ }
+ def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree]): PackageDef = tree match {
+ case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree
+ case _ => finalize(tree, untpd.PackageDef(pid, stats))
+ }
+ def Annotated(tree: Tree)(arg: Tree, annot: Tree)(implicit ctx: Context): Annotated = tree match {
+ case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree
+ case _ => finalize(tree, untpd.Annotated(arg, annot))
+ }
+ def Thicket(tree: Tree)(trees: List[Tree]): Thicket = tree match {
+ case tree: Thicket if trees eq tree.trees => tree
+ case _ => finalize(tree, untpd.Thicket(trees))
+ }
+
+ // Copier methods with default arguments; these demand that the original tree
+ // is of the same class as the copy. We only include trees with more than 2 elements here.
+ def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(implicit ctx: Context): If =
+ If(tree: Tree)(cond, thenp, elsep)
+ def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(implicit ctx: Context): Closure =
+ Closure(tree: Tree)(env, meth, tpt)
+ def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(implicit ctx: Context): CaseDef =
+ CaseDef(tree: Tree)(pat, guard, body)
+ def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(implicit ctx: Context): Try =
+ Try(tree: Tree)(expr, cases, finalizer)
+ def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns): UnApply =
+ UnApply(tree: Tree)(fun, implicits, patterns)
+ def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs): ValDef =
+ ValDef(tree: Tree)(name, tpt, rhs)
+ def DefDef(tree: DefDef)(name: TermName = tree.name, tparams: List[TypeDef] = tree.tparams, vparamss: List[List[ValDef]] = tree.vparamss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs): DefDef =
+ DefDef(tree: Tree)(name, tparams, vparamss, tpt, rhs)
+ def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs): TypeDef =
+ TypeDef(tree: Tree)(name, rhs)
+ def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody): Template =
+ Template(tree: Tree)(constr, parents, self, body)
+ }
+
+ /** A generic tree transformer. `transform` rebuilds each node through `cpy`,
+ * which returns the original node when every child is unchanged, so an
+ * identity transformation allocates nothing.
+ */
+ abstract class TreeMap(val cpy: TreeCopier = inst.cpy) {
+
+ def transform(tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case Ident(name) =>
+ tree
+ case Select(qualifier, name) =>
+ cpy.Select(tree)(transform(qualifier), name)
+ case This(qual) =>
+ tree
+ case Super(qual, mix) =>
+ cpy.Super(tree)(transform(qual), mix)
+ case Apply(fun, args) =>
+ cpy.Apply(tree)(transform(fun), transform(args))
+ case TypeApply(fun, args) =>
+ cpy.TypeApply(tree)(transform(fun), transform(args))
+ case Literal(const) =>
+ tree
+ case New(tpt) =>
+ cpy.New(tree)(transform(tpt))
+ case Typed(expr, tpt) =>
+ cpy.Typed(tree)(transform(expr), transform(tpt))
+ case NamedArg(name, arg) =>
+ cpy.NamedArg(tree)(name, transform(arg))
+ case Assign(lhs, rhs) =>
+ cpy.Assign(tree)(transform(lhs), transform(rhs))
+ case Block(stats, expr) =>
+ cpy.Block(tree)(transformStats(stats), transform(expr))
+ case If(cond, thenp, elsep) =>
+ cpy.If(tree)(transform(cond), transform(thenp), transform(elsep))
+ case Closure(env, meth, tpt) =>
+ cpy.Closure(tree)(transform(env), transform(meth), transform(tpt))
+ case Match(selector, cases) =>
+ cpy.Match(tree)(transform(selector), transformSub(cases))
+ case CaseDef(pat, guard, body) =>
+ cpy.CaseDef(tree)(transform(pat), transform(guard), transform(body))
+ case Return(expr, from) =>
+ cpy.Return(tree)(transform(expr), transformSub(from))
+ case Try(block, cases, finalizer) =>
+ cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer))
+ case SeqLiteral(elems, elemtpt) =>
+ cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt))
+ case Inlined(call, bindings, expansion) =>
+ // The inline call itself is passed through untransformed.
+ cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion))
+ case TypeTree() =>
+ tree
+ case SingletonTypeTree(ref) =>
+ cpy.SingletonTypeTree(tree)(transform(ref))
+ case AndTypeTree(left, right) =>
+ cpy.AndTypeTree(tree)(transform(left), transform(right))
+ case OrTypeTree(left, right) =>
+ cpy.OrTypeTree(tree)(transform(left), transform(right))
+ case RefinedTypeTree(tpt, refinements) =>
+ cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements))
+ case AppliedTypeTree(tpt, args) =>
+ cpy.AppliedTypeTree(tree)(transform(tpt), transform(args))
+ case PolyTypeTree(tparams, body) =>
+ cpy.PolyTypeTree(tree)(transformSub(tparams), transform(body))
+ case ByNameTypeTree(result) =>
+ cpy.ByNameTypeTree(tree)(transform(result))
+ case TypeBoundsTree(lo, hi) =>
+ cpy.TypeBoundsTree(tree)(transform(lo), transform(hi))
+ case Bind(name, body) =>
+ cpy.Bind(tree)(name, transform(body))
+ case Alternative(trees) =>
+ cpy.Alternative(tree)(transform(trees))
+ case UnApply(fun, implicits, patterns) =>
+ cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns))
+ case EmptyValDef =>
+ tree
+ // Lazy rhs/body fields are forced here via tree.rhs / tree.body.
+ case tree @ ValDef(name, tpt, _) =>
+ val tpt1 = transform(tpt)
+ val rhs1 = transform(tree.rhs)
+ cpy.ValDef(tree)(name, tpt1, rhs1)
+ case tree @ DefDef(name, tparams, vparamss, tpt, _) =>
+ cpy.DefDef(tree)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt), transform(tree.rhs))
+ case tree @ TypeDef(name, rhs) =>
+ cpy.TypeDef(tree)(name, transform(rhs))
+ case tree @ Template(constr, parents, self, _) =>
+ cpy.Template(tree)(transformSub(constr), transform(parents), transformSub(self), transformStats(tree.body))
+ case Import(expr, selectors) =>
+ cpy.Import(tree)(transform(expr), selectors)
+ case PackageDef(pid, stats) =>
+ cpy.PackageDef(tree)(transformSub(pid), transformStats(stats))
+ case Annotated(arg, annot) =>
+ cpy.Annotated(tree)(transform(arg), transform(annot))
+ case Thicket(trees) =>
+ val trees1 = transform(trees)
+ if (trees1 eq trees) tree else Thicket(trees1)
+ }
+
+ // Statement lists route through a separate hook so subclasses can treat them specially.
+ def transformStats(trees: List[Tree])(implicit ctx: Context): List[Tree] =
+ transform(trees)
+ // Transforming a list re-flattens it, since a transform may return Thickets.
+ def transform(trees: List[Tree])(implicit ctx: Context): List[Tree] =
+ flatten(trees mapConserve (transform(_)))
+ // Casts assume the transform preserves the node class for these call sites.
+ def transformSub[Tr <: Tree](tree: Tr)(implicit ctx: Context): Tr =
+ transform(tree).asInstanceOf[Tr]
+ def transformSub[Tr <: Tree](trees: List[Tr])(implicit ctx: Context): List[Tr] =
+ transform(trees).asInstanceOf[List[Tr]]
+ }
+
+ /** Folds a value of type X over a tree. Subclasses implement `apply`;
+ * `foldOver` applies `this` to all direct children of a node in
+ * left-to-right order.
+ */
+ abstract class TreeAccumulator[X] {
+ def apply(x: X, tree: Tree)(implicit ctx: Context): X
+ def apply(x: X, trees: Traversable[Tree])(implicit ctx: Context): X = (x /: trees)(apply)
+ def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = {
+ // Context with the tree's own symbol as owner, for folding inside definitions.
+ def localCtx =
+ if (tree.hasType && tree.symbol.exists) ctx.withOwner(tree.symbol) else ctx
+ tree match {
+ case Ident(name) =>
+ x
+ case Select(qualifier, name) =>
+ this(x, qualifier)
+ case This(qual) =>
+ x
+ case Super(qual, mix) =>
+ this(x, qual)
+ case Apply(fun, args) =>
+ this(this(x, fun), args)
+ case TypeApply(fun, args) =>
+ this(this(x, fun), args)
+ case Literal(const) =>
+ x
+ case New(tpt) =>
+ this(x, tpt)
+ case Typed(expr, tpt) =>
+ this(this(x, expr), tpt)
+ case NamedArg(name, arg) =>
+ this(x, arg)
+ case Assign(lhs, rhs) =>
+ this(this(x, lhs), rhs)
+ case Block(stats, expr) =>
+ this(this(x, stats), expr)
+ case If(cond, thenp, elsep) =>
+ this(this(this(x, cond), thenp), elsep)
+ case Closure(env, meth, tpt) =>
+ this(this(this(x, env), meth), tpt)
+ case Match(selector, cases) =>
+ this(this(x, selector), cases)
+ case CaseDef(pat, guard, body) =>
+ this(this(this(x, pat), guard), body)
+ case Return(expr, from) =>
+ this(this(x, expr), from)
+ case Try(block, handler, finalizer) =>
+ this(this(this(x, block), handler), finalizer)
+ case SeqLiteral(elems, elemtpt) =>
+ this(this(x, elems), elemtpt)
+ case Inlined(call, bindings, expansion) =>
+ // The inline call itself is not folded over.
+ this(this(x, bindings), expansion)
+ case TypeTree() =>
+ x
+ case SingletonTypeTree(ref) =>
+ this(x, ref)
+ case AndTypeTree(left, right) =>
+ this(this(x, left), right)
+ case OrTypeTree(left, right) =>
+ this(this(x, left), right)
+ case RefinedTypeTree(tpt, refinements) =>
+ this(this(x, tpt), refinements)
+ case AppliedTypeTree(tpt, args) =>
+ this(this(x, tpt), args)
+ case PolyTypeTree(tparams, body) =>
+ implicit val ctx: Context = localCtx
+ this(this(x, tparams), body)
+ case ByNameTypeTree(result) =>
+ this(x, result)
+ case TypeBoundsTree(lo, hi) =>
+ this(this(x, lo), hi)
+ case Bind(name, body) =>
+ this(x, body)
+ case Alternative(trees) =>
+ this(x, trees)
+ case UnApply(fun, implicits, patterns) =>
+ this(this(this(x, fun), implicits), patterns)
+ // Definition nodes shadow `ctx` with localCtx so the fold runs with the
+ // definition's symbol as owner; tree.rhs forces a lazy rhs.
+ case tree @ ValDef(name, tpt, _) =>
+ implicit val ctx: Context = localCtx
+ this(this(x, tpt), tree.rhs)
+ case tree @ DefDef(name, tparams, vparamss, tpt, _) =>
+ implicit val ctx: Context = localCtx
+ this(this((this(x, tparams) /: vparamss)(apply), tpt), tree.rhs)
+ case TypeDef(name, rhs) =>
+ implicit val ctx: Context = localCtx
+ this(x, rhs)
+ // NOTE(review): unlike ValDef/DefDef/TypeDef, Template does not switch to
+ // localCtx here — confirm this is intentional.
+ case tree @ Template(constr, parents, self, _) =>
+ this(this(this(this(x, constr), parents), self), tree.body)
+ case Import(expr, selectors) =>
+ this(x, expr)
+ case PackageDef(pid, stats) =>
+ this(this(x, pid), stats)(localCtx)
+ case Annotated(arg, annot) =>
+ this(this(x, arg), annot)
+ case Thicket(ts) =>
+ this(x, ts)
+ }
+ }
+ }
+
+ // A TreeAccumulator specialized to Unit: subclasses implement `traverse`
+ // and can recurse into a node's children with `traverseChildren`.
+ abstract class TreeTraverser extends TreeAccumulator[Unit] {
+ def traverse(tree: Tree)(implicit ctx: Context): Unit
+ def apply(x: Unit, tree: Tree)(implicit ctx: Context) = traverse(tree)
+ protected def traverseChildren(tree: Tree)(implicit ctx: Context) = foldOver((), tree)
+ }
+
+ /** Fold `f` over all tree nodes, in depth-first, prefix order */
+ class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] {
+ // Apply f to the node itself first, then fold over its children.
+ def apply(x: X, tree: Tree)(implicit ctx: Context): X = foldOver(f(x, tree), tree)
+ }
+
+ /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit
+ * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`.
+ */
+ class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] {
+ def apply(x: X, tree: Tree)(implicit ctx: Context): X = {
+ val x1 = f(x, tree)
+ // Compare the new accumulator against the PREVIOUS one, as documented above.
+ // Bug fix: the original wrote `x1 ... ne x1 ...` — a self-comparison that is
+ // never true for the same reference (and, via boxing of primitives, could be
+ // spuriously true), so pruning never happened as specified.
+ if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1
+ else foldOver(x1, tree)
+ }
+ }
+
+ /** A copy of `tree` carrying `newName` instead of its current name.
+ * Each branch copies via `cpy`, which preserves the node class, so the
+ * final cast to the dependent type `tree.ThisTree[T]` is safe.
+ */
+ def rename(tree: NameTree, newName: Name)(implicit ctx: Context): tree.ThisTree[T] = {
+ tree match {
+ case tree: Ident => cpy.Ident(tree)(newName)
+ case tree: Select => cpy.Select(tree)(tree.qualifier, newName)
+ case tree: Bind => cpy.Bind(tree)(newName, tree.body)
+ case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName)
+ case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName)
+ case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName)
+ }
+ }.asInstanceOf[tree.ThisTree[T]]
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala
new file mode 100644
index 000000000..44e1cf188
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala
@@ -0,0 +1,952 @@
+package dotty.tools
+package dotc
+package ast
+
+import dotty.tools.dotc.transform.{ExplicitOuter, Erasure}
+import dotty.tools.dotc.typer.ProtoTypes.FunProtoTyped
+import transform.SymUtils._
+import core._
+import util.Positions._, Types._, Contexts._, Constants._, Names._, Flags._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Symbols._
+import Denotations._, Decorators._, DenotTransformers._
+import collection.mutable
+import util.{Property, SourceFile, NoSource}
+import typer.ErrorReporting._
+
+import scala.annotation.tailrec
+import scala.io.Codec
+
+/** Some creators for typed trees */
+object tpd extends Trees.Instance[Type] with TypedTreeInfo {
+
+ private def ta(implicit ctx: Context) = ctx.typeAssigner
+
+ def Ident(tp: NamedType)(implicit ctx: Context): Ident =
+ ta.assignType(untpd.Ident(tp.name), tp)
+
+ def Select(qualifier: Tree, name: Name)(implicit ctx: Context): Select =
+ ta.assignType(untpd.Select(qualifier, name), qualifier)
+
+ def Select(qualifier: Tree, tp: NamedType)(implicit ctx: Context): Select =
+ untpd.Select(qualifier, tp.name).withType(tp)
+
+ def This(cls: ClassSymbol)(implicit ctx: Context): This =
+ untpd.This(untpd.Ident(cls.name)).withType(cls.thisType)
+
+ def Super(qual: Tree, mix: untpd.Ident, inConstrCall: Boolean, mixinClass: Symbol)(implicit ctx: Context): Super =
+ ta.assignType(untpd.Super(qual, mix), qual, inConstrCall, mixinClass)
+
+ def Super(qual: Tree, mixName: TypeName, inConstrCall: Boolean, mixinClass: Symbol = NoSymbol)(implicit ctx: Context): Super =
+ Super(qual, if (mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), inConstrCall, mixinClass)
+
+ def Apply(fn: Tree, args: List[Tree])(implicit ctx: Context): Apply =
+ ta.assignType(untpd.Apply(fn, args), fn, args)
+
+ def TypeApply(fn: Tree, args: List[Tree])(implicit ctx: Context): TypeApply =
+ ta.assignType(untpd.TypeApply(fn, args), fn, args)
+
+ def Literal(const: Constant)(implicit ctx: Context): Literal =
+ ta.assignType(untpd.Literal(const))
+
+ def unitLiteral(implicit ctx: Context): Literal =
+ Literal(Constant(()))
+
+ def New(tpt: Tree)(implicit ctx: Context): New =
+ ta.assignType(untpd.New(tpt), tpt)
+
+ def New(tp: Type)(implicit ctx: Context): New = New(TypeTree(tp))
+
+ def Typed(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed =
+ ta.assignType(untpd.Typed(expr, tpt), tpt)
+
+ def NamedArg(name: Name, arg: Tree)(implicit ctx: Context): NamedArg =
+ ta.assignType(untpd.NamedArg(name, arg), arg)
+
+ def Assign(lhs: Tree, rhs: Tree)(implicit ctx: Context): Assign =
+ ta.assignType(untpd.Assign(lhs, rhs))
+
+ def Block(stats: List[Tree], expr: Tree)(implicit ctx: Context): Block =
+ ta.assignType(untpd.Block(stats, expr), stats, expr)
+
+ /** Join `stats` in front of `expr` creating a new block if necessary */
+ def seq(stats: List[Tree], expr: Tree)(implicit ctx: Context): Tree =
+ if (stats.isEmpty) expr
+ else expr match {
+ // If `expr` is itself a Block, splice `stats` into it rather than nesting blocks.
+ case Block(estats, eexpr) => cpy.Block(expr)(stats ::: estats, eexpr)
+ case _ => Block(stats, expr)
+ }
+
+ def If(cond: Tree, thenp: Tree, elsep: Tree)(implicit ctx: Context): If =
+ ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep)
+
+ def Closure(env: List[Tree], meth: Tree, tpt: Tree)(implicit ctx: Context): Closure =
+ ta.assignType(untpd.Closure(env, meth, tpt), meth, tpt)
+
+ /** A function def
+ *
+ * vparams => expr
+ *
+ * gets expanded to
+ *
+ * { def $anonfun(vparams) = expr; Closure($anonfun) }
+ *
+ * where the closure's type is the target type of the expression (FunctionN, unless
+ * otherwise specified).
+ *
+ * @param meth the (already created) symbol of the anonymous method
+ * @param rhsFn builds the method body from the parameter reference trees
+ * @param targs optional type arguments applied to the method reference
+ * @param targetType if it exists, becomes the closure's explicit target type tree
+ */
+ def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(implicit ctx: Context): Block = {
+ val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree
+ val call =
+ if (targs.isEmpty) Ident(TermRef(NoPrefix, meth))
+ else TypeApply(Ident(TermRef(NoPrefix, meth)), targs)
+ Block(
+ DefDef(meth, rhsFn) :: Nil,
+ Closure(Nil, call, targetTpt))
+ }
+
+ def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit ctx: Context): CaseDef =
+ ta.assignType(untpd.CaseDef(pat, guard, body), body)
+
+ def Match(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match =
+ ta.assignType(untpd.Match(selector, cases), cases)
+
+ def Return(expr: Tree, from: Tree)(implicit ctx: Context): Return =
+ ta.assignType(untpd.Return(expr, from))
+
+ def Try(block: Tree, cases: List[CaseDef], finalizer: Tree)(implicit ctx: Context): Try =
+ ta.assignType(untpd.Try(block, cases, finalizer), block, cases)
+
+ def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): SeqLiteral =
+ ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt)
+
+ def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): JavaSeqLiteral =
+ ta.assignType(new untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral]
+
+ def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(implicit ctx: Context): Inlined =
+ ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion)
+
+ def TypeTree(tp: Type)(implicit ctx: Context): TypeTree =
+ untpd.TypeTree().withType(tp)
+
+ def SingletonTypeTree(ref: Tree)(implicit ctx: Context): SingletonTypeTree =
+ ta.assignType(untpd.SingletonTypeTree(ref), ref)
+
+ def AndTypeTree(left: Tree, right: Tree)(implicit ctx: Context): AndTypeTree =
+ ta.assignType(untpd.AndTypeTree(left, right), left, right)
+
+ def OrTypeTree(left: Tree, right: Tree)(implicit ctx: Context): OrTypeTree =
+ ta.assignType(untpd.OrTypeTree(left, right), left, right)
+
+ def RefinedTypeTree(parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(implicit ctx: Context): Tree =
+ ta.assignType(untpd.RefinedTypeTree(parent, refinements), parent, refinements, refineCls)
+
+ def AppliedTypeTree(tycon: Tree, args: List[Tree])(implicit ctx: Context): AppliedTypeTree =
+ ta.assignType(untpd.AppliedTypeTree(tycon, args), tycon, args)
+
+ def ByNameTypeTree(result: Tree)(implicit ctx: Context): ByNameTypeTree =
+ ta.assignType(untpd.ByNameTypeTree(result), result)
+
+ def PolyTypeTree(tparams: List[TypeDef], body: Tree)(implicit ctx: Context): PolyTypeTree =
+ ta.assignType(untpd.PolyTypeTree(tparams, body), tparams, body)
+
+ def TypeBoundsTree(lo: Tree, hi: Tree)(implicit ctx: Context): TypeBoundsTree =
+ ta.assignType(untpd.TypeBoundsTree(lo, hi), lo, hi)
+
+ def Bind(sym: TermSymbol, body: Tree)(implicit ctx: Context): Bind =
+ ta.assignType(untpd.Bind(sym.name, body), sym)
+
+ /** A pattern corresponding to `sym: tpe` */
+ def BindTyped(sym: TermSymbol, tpe: Type)(implicit ctx: Context): Bind =
+ Bind(sym, Typed(Underscore(tpe), TypeTree(tpe)))
+
+ def Alternative(trees: List[Tree])(implicit ctx: Context): Alternative =
+ ta.assignType(untpd.Alternative(trees), trees)
+
+ def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(implicit ctx: Context): UnApply =
+ ta.assignType(untpd.UnApply(fun, implicits, patterns), proto)
+
+ /** A `val` definition tree for `sym`, with its type tree taken from `sym.info`. */
+ def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree)(implicit ctx: Context): ValDef =
+ ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info), rhs), sym)
+
+ /** A synthetic `val name = rhs`, creating a fresh Synthetic symbol whose type is
+ * the widened type of `rhs` and whose coordinate is `rhs`'s position.
+ */
+ def SyntheticValDef(name: TermName, rhs: Tree)(implicit ctx: Context): ValDef =
+ ValDef(ctx.newSymbol(ctx.owner, name, Synthetic, rhs.tpe.widen, coord = rhs.pos), rhs)
+
+ def DefDef(sym: TermSymbol, rhs: Tree = EmptyTree)(implicit ctx: Context): DefDef =
+ ta.assignType(DefDef(sym, Function.const(rhs) _), sym)
+
+ def DefDef(sym: TermSymbol, rhsFn: List[List[Tree]] => Tree)(implicit ctx: Context): DefDef =
+ polyDefDef(sym, Function.const(rhsFn))
+
+ /** A possibly polymorphic method definition for `sym`, with the body supplied by
+ * `rhsFn`, which receives the type parameter references and then the term
+ * parameter reference trees, curried per parameter list.
+ */
+ def polyDefDef(sym: TermSymbol, rhsFn: List[Type] => List[List[Tree]] => Tree)(implicit ctx: Context): DefDef = {
+ // Materialize fresh type-parameter symbols if `sym.info` is polymorphic,
+ // and instantiate the underlying method type with their references.
+ val (tparams, mtp) = sym.info match {
+ case tp: PolyType =>
+ val tparams = ctx.newTypeParams(sym, tp.paramNames, EmptyFlags, tp.instantiateBounds)
+ (tparams, tp.instantiate(tparams map (_.typeRef)))
+ case tp => (Nil, tp)
+ }
+
+ // Peel off each MethodType layer, creating a term symbol per parameter;
+ // returns all parameter lists and the final (widened) result type.
+ def valueParamss(tp: Type): (List[List[TermSymbol]], Type) = tp match {
+ case tp @ MethodType(paramNames, paramTypes) =>
+ def valueParam(name: TermName, info: Type): TermSymbol = {
+ val maybeImplicit = if (tp.isInstanceOf[ImplicitMethodType]) Implicit else EmptyFlags
+ ctx.newSymbol(sym, name, TermParam | maybeImplicit, info)
+ }
+ val params = (paramNames, paramTypes).zipped.map(valueParam)
+ val (paramss, rtp) = valueParamss(tp.instantiate(params map (_.termRef)))
+ (params :: paramss, rtp)
+ case tp => (Nil, tp.widenExpr)
+ }
+ val (vparamss, rtp) = valueParamss(mtp)
+ val targs = tparams map (_.typeRef)
+ val argss = vparamss.nestedMap(vparam => Ident(vparam.termRef))
+ ta.assignType(
+ untpd.DefDef(
+ sym.name,
+ tparams map TypeDef,
+ vparamss.nestedMap(ValDef(_)),
+ TypeTree(rtp),
+ rhsFn(targs)(argss)),
+ sym)
+ }
+
+ def TypeDef(sym: TypeSymbol)(implicit ctx: Context): TypeDef =
+ ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym)
+
+ /** A class definition tree for `cls` with the given primary constructor and body.
+ * For non-trait classes, the first parent becomes a constructor call applied to
+ * `superArgs`; a matching parent constructor is selected by applicability.
+ */
+ def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(implicit ctx: Context): TypeDef = {
+ val firstParentRef :: otherParentRefs = cls.info.parents
+ val firstParent = cls.typeRef.baseTypeWithArgs(firstParentRef.symbol)
+ val superRef =
+ if (cls is Trait) TypeTree(firstParent)
+ else {
+ // Pick the parent constructor whose (instantiated) parameter types
+ // accept the given `superArgs`.
+ def isApplicable(ctpe: Type): Boolean = ctpe match {
+ case ctpe: PolyType =>
+ isApplicable(ctpe.instantiate(firstParent.argTypes))
+ case ctpe: MethodType =>
+ (superArgs corresponds ctpe.paramTypes)(_.tpe <:< _)
+ case _ =>
+ false
+ }
+ val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info))
+ New(firstParent, constr.symbol.asTerm, superArgs)
+ }
+ val parents = superRef :: otherParentRefs.map(TypeTree(_))
+
+ val selfType =
+ if (cls.classInfo.selfInfo ne NoType) ValDef(ctx.newSelfSym(cls))
+ else EmptyValDef
+ // Type parameters of `cls` that are not already defined in `body`
+ // get synthesized TypeDefs prepended to the template body.
+ def isOwnTypeParam(stat: Tree) =
+ (stat.symbol is TypeParam) && stat.symbol.owner == cls
+ val bodyTypeParams = body filter isOwnTypeParam map (_.symbol)
+ val newTypeParams =
+ for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam))
+ yield TypeDef(tparam)
+ // Reuse a local dummy found in `body` if one exists, else create a fresh one;
+ // it becomes the type of the template node.
+ val findLocalDummy = new FindLocalDummyAccumulator(cls)
+ val localDummy = ((NoSymbol: Symbol) /: body)(findLocalDummy.apply)
+ .orElse(ctx.newLocalDummy(cls))
+ val impl = untpd.Template(constr, parents, selfType, newTypeParams ++ body)
+ .withType(localDummy.nonMemberTermRef)
+ ta.assignType(untpd.TypeDef(cls.name, impl), cls)
+ }
+
+ /** An anonymous class
+ *
+ * new parents { forwarders }
+ *
+ * where `forwarders` contains forwarders for all functions in `fns`.
+ * @param parents a non-empty list of class types
+ * @param fns a non-empty list of functions for which forwarders should be defined
+ * in the class. The class has the same owner as the first function in `fns`.
+ * Its position is the union of all functions in `fns`.
+ * @param methNames the names the forwarders are given, zipped with `fns`
+ */
+ def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(implicit ctx: Context): Block = {
+ val owner = fns.head.owner
+ // If the first parent is a trait, prepend Object so the class has a proper superclass.
+ val parents1 =
+ if (parents.head.classSymbol.is(Trait)) defn.ObjectType :: parents
+ else parents
+ val cls = ctx.newNormalizedClassSymbol(owner, tpnme.ANON_FUN, Synthetic, parents1,
+ coord = fns.map(_.pos).reduceLeft(_ union _))
+ val constr = ctx.newConstructor(cls, Synthetic, Nil, Nil).entered
+ // Each forwarder copies `fn` into the class under `name` and delegates to it.
+ def forwarder(fn: TermSymbol, name: TermName) = {
+ val fwdMeth = fn.copy(cls, name, Synthetic | Method).entered.asTerm
+ DefDef(fwdMeth, prefss => ref(fn).appliedToArgss(prefss))
+ }
+ val forwarders = (fns, methNames).zipped.map(forwarder)
+ val cdef = ClassDef(cls, DefDef(constr), forwarders)
+ Block(cdef :: Nil, New(cls.typeRef, Nil))
+ }
+
+ /** A while loop, encoded as a tail-recursive labeled method:
+ *
+ * { <label> def while$(): Unit = if (cond) { body; while$() } ; while$() }
+ */
+ def WhileDo(owner: Symbol, cond: Tree, body: List[Tree])(implicit ctx: Context): Tree = {
+ val sym = ctx.newSymbol(owner, nme.WHILE_PREFIX, Flags.Label | Flags.Synthetic,
+ MethodType(Nil, defn.UnitType), coord = cond.pos)
+
+ val call = Apply(ref(sym), Nil)
+ val rhs = If(cond, Block(body, call), unitLiteral)
+ Block(List(DefDef(sym, rhs)), call)
+ }
+
+ def Import(expr: Tree, selectors: List[untpd.Tree])(implicit ctx: Context): Import =
+ ta.assignType(untpd.Import(expr, selectors), ctx.newImportSymbol(ctx.owner, expr))
+
+ def PackageDef(pid: RefTree, stats: List[Tree])(implicit ctx: Context): PackageDef =
+ ta.assignType(untpd.PackageDef(pid, stats), pid)
+
+ def Annotated(arg: Tree, annot: Tree)(implicit ctx: Context): Annotated =
+ ta.assignType(untpd.Annotated(arg, annot), arg, annot)
+
+ def Throw(expr: Tree)(implicit ctx: Context): Tree =
+ ref(defn.throwMethod).appliedTo(expr)
+
+ // ------ Making references ------------------------------------------------------
+
+ /** Can a reference of type `tp` be emitted as a bare `Ident`, i.e. without
+ * materializing its prefix? True when the prefix itself needs no tree
+ * (NoPrefix, a static `this`, a static module) or when the symbol is
+ * statically reachable (JavaStatic or annotated `@static`).
+ */
+ def prefixIsElidable(tp: NamedType)(implicit ctx: Context) = {
+ val typeIsElidable = tp.prefix match {
+ case NoPrefix =>
+ true
+ case pre: ThisType =>
+ pre.cls.isStaticOwner ||
+ tp.symbol.is(ParamOrAccessor) && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls
+ // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough
+ // and was spuriously triggered in case inner class would inherit from outer one
+ // eg anonymous TypeMap inside TypeMap.andThen
+ case pre: TermRef =>
+ pre.symbol.is(Module) && pre.symbol.isStatic
+ case _ =>
+ false
+ }
+ typeIsElidable ||
+ tp.symbol.is(JavaStatic) ||
+ tp.symbol.hasAnnotation(defn.ScalaStaticAnnot)
+ }
+
+ /** Does a reference of type `tp` require an explicit `Select` on its prefix?
+ * Only term references with a non-elidable prefix do.
+ */
+ def needsSelect(tp: Type)(implicit ctx: Context) = tp match {
+ case tp: TermRef => !prefixIsElidable(tp)
+ case _ => false
+ }
+
+ /** A tree representing the same reference as the given type */
+ def ref(tp: NamedType)(implicit ctx: Context): Tree =
+ if (tp.isType) TypeTree(tp)
+ else if (prefixIsElidable(tp)) Ident(tp)
+ // A module referenced from inside itself is addressed via `this`,
+ // threading through outer accessors after erasure if needed.
+ else if (tp.symbol.is(Module) && ctx.owner.isContainedIn(tp.symbol.moduleClass))
+ followOuterLinks(This(tp.symbol.moduleClass.asClass))
+ else if (tp.symbol hasAnnotation defn.ScalaStaticAnnot)
+ Ident(tp)
+ else tp.prefix match {
+ case pre: SingletonType => followOuterLinks(singleton(pre)).select(tp)
+ case pre => Select(TypeTree(pre), tp)
+ } // no checks necessary
+
+ def ref(sym: Symbol)(implicit ctx: Context): Tree =
+ ref(NamedType(sym.owner.thisType, sym.name, sym.denot))
+
+ /** After erasure, a `This` of a non-enclosing, non-static class cannot be
+ * referenced directly; replace it with a path through outer accessors.
+ * Any other tree is returned unchanged.
+ */
+ private def followOuterLinks(t: Tree)(implicit ctx: Context) = t match {
+ case t: This if ctx.erasedTypes && !(t.symbol == ctx.owner.enclosingClass || t.symbol.isStaticOwner) =>
+ // after erasure outer paths should be respected
+ new ExplicitOuter.OuterOps(ctx).path(t.tpe.widen.classSymbol)
+ case t =>
+ t
+ }
+
+ /** A tree denoting the given singleton type `tp`.
+ * NOTE(review): the match is not exhaustive over all Types — callers are
+ * expected to pass only singleton types; anything else raises a MatchError.
+ */
+ def singleton(tp: Type)(implicit ctx: Context): Tree = tp match {
+ case tp: TermRef => ref(tp)
+ case tp: ThisType => This(tp.cls)
+ case tp: SkolemType => singleton(tp.narrow)
+ case SuperType(qual, _) => singleton(qual)
+ case ConstantType(value) => Literal(value)
+ }
+
+ /** A tree representing a `newXYZArray` operation of the right
+ * kind for the given element type in `typeArg`. No type arguments or
+ * `length` arguments are given.
+ * NOTE(review): the sentence above looks stale — before erasure the body
+ * does apply a type argument (`returnTpe`); confirm against callers.
+ */
+ def newArray(elemTpe: Type, returnTpe: Type, pos: Position, dims: JavaSeqLiteral)(implicit ctx: Context): Tree = {
+ val elemClass = elemTpe.classSymbol
+ def newArr =
+ ref(defn.DottyArraysModule).select(defn.newArrayMethod).withPos(pos)
+
+ if (!ctx.erasedTypes) {
+ assert(!TypeErasure.isUnboundedGeneric(elemTpe)) //needs to be done during typer. See Applications.convertNewGenericArray
+ newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withPos(pos)
+ } else // after erasure
+ newArr.appliedToArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withPos(pos)
+ }
+
+ // ------ Creating typed equivalents of trees that exist only in untyped form -------
+
+ /** new C(args), calling the primary constructor of C */
+ def New(tp: Type, args: List[Tree])(implicit ctx: Context): Apply =
+ New(tp, tp.typeSymbol.primaryConstructor.asTerm, args)
+
+ /** new C(args), calling given constructor `constr` of C.
+ * Type arguments present in `tp` are split off and re-applied to the
+ * constructor selection.
+ */
+ def New(tp: Type, constr: TermSymbol, args: List[Tree])(implicit ctx: Context): Apply = {
+ val targs = tp.argTypes
+ val tycon = tp.withoutArgs(targs)
+ New(tycon)
+ .select(TermRef.withSig(tycon, constr))
+ .appliedToTypes(targs)
+ .appliedToArgs(args)
+ }
+
+ /** An object def
+ *
+ * object obs extends parents { decls }
+ *
+ * gets expanded to
+ *
+ * <module> val obj = new obj$
+ * <module> class obj$ extends parents { this: obj.type => decls }
+ *
+ * (The following no longer applies:
+ * What's interesting here is that the block is well typed
+ * (because class obj$ is hoistable), but the type of the `obj` val is
+ * not expressible. What needs to happen in general when
+ * inferring the type of a val from its RHS, is: if the type contains
+ * a class that has the val itself as owner, then that class
+ * is remapped to have the val's owner as owner. Remapping could be
+ * done by cloning the class with the new owner and substituting
+ * everywhere in the tree. We know that remapping is safe
+ * because the only way a local class can appear in the RHS of a val is
+ * by being hoisted outside of a block, and the necessary checks are
+ * done at this point already.
+ *
+ * On the other hand, for method result type inference, if the type of
+ * the RHS of a method contains a class owned by the method, this would be
+ * an error.)
+ *
+ * @return a Thicket of the module val and the module class def
+ */
+ def ModuleDef(sym: TermSymbol, body: List[Tree])(implicit ctx: Context): tpd.Thicket = {
+ val modcls = sym.moduleClass.asClass
+ // Reuse an existing primary constructor, or synthesize a default one.
+ val constrSym = modcls.primaryConstructor orElse ctx.newDefaultConstructor(modcls).entered
+ val constr = DefDef(constrSym.asTerm, EmptyTree)
+ val clsdef = ClassDef(modcls, constr, body)
+ val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone)
+ Thicket(valdef, clsdef)
+ }
+
+ /** A `_' with given type */
+ def Underscore(tp: Type)(implicit ctx: Context) = untpd.Ident(nme.WILDCARD).withType(tp)
+
+ /** The JVM default value for (the widened form of) `tpe`: the zero literal for
+ * each primitive type, and `null.asInstanceOf[tpe]` for every other type.
+ */
+ def defaultValue(tpe: Types.Type)(implicit ctx: Context) = {
+ val tpw = tpe.widen
+
+ if (tpw isRef defn.IntClass) Literal(Constant(0))
+ else if (tpw isRef defn.LongClass) Literal(Constant(0L))
+ else if (tpw isRef defn.BooleanClass) Literal(Constant(false))
+ else if (tpw isRef defn.CharClass) Literal(Constant('\u0000'))
+ else if (tpw isRef defn.FloatClass) Literal(Constant(0f))
+ else if (tpw isRef defn.DoubleClass) Literal(Constant(0d))
+ else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte))
+ else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort))
+ else Literal(Constant(null)).select(defn.Any_asInstanceOf).appliedToType(tpe)
+ }
+
+ /** Accumulator that searches a tree for the local-dummy symbol of `cls`:
+ * the owner of statements in the class template that are not members.
+ * Stops at the first local dummy found; does not descend into definitions
+ * owned by other classes.
+ */
+ private class FindLocalDummyAccumulator(cls: ClassSymbol)(implicit ctx: Context) extends TreeAccumulator[Symbol] {
+ def apply(sym: Symbol, tree: Tree)(implicit ctx: Context) =
+ if (sym.exists) sym
+ else if (tree.isDef) {
+ val owner = tree.symbol.owner
+ if (owner.isLocalDummy && owner.owner == cls) owner
+ else if (owner == cls) foldOver(sym, tree)
+ else sym
+ } else foldOver(sym, tree)
+ }
+
+ override val cpy = new TypedTreeCopier
+
+ /** Tree copier for typed trees. Where possible it preserves the original
+ * node's type without recomputation (when the relevant subtree types are
+ * reference-identical, checked with `eq`); otherwise it reassigns the type
+ * via the type assigner `ta`.
+ */
+ class TypedTreeCopier extends TreeCopier {
+ def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] =
+ copied.withTypeUnchecked(tree.tpe)
+ def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] =
+ copied.withTypeUnchecked(tree.tpe)
+
+ override def Select(tree: Tree)(qualifier: Tree, name: Name)(implicit ctx: Context): Select = {
+ val tree1 = untpd.cpy.Select(tree)(qualifier, name)
+ tree match {
+ // Same qualifier type: keep the original type unchanged.
+ case tree: Select if qualifier.tpe eq tree.qualifier.tpe =>
+ tree1.withTypeUnchecked(tree.tpe)
+ case _ => tree.tpe match {
+ // Rebase the named type on the new qualifier's type.
+ case tpe: NamedType => tree1.withType(tpe.derivedSelect(qualifier.tpe))
+ case _ => tree1.withTypeUnchecked(tree.tpe)
+ }
+ }
+ }
+
+ override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(implicit ctx: Context): Apply =
+ ta.assignType(untpd.cpy.Apply(tree)(fun, args), fun, args)
+ // Note: Reassigning the original type if `fun` and `args` have the same types as before
+ // does not work here: The computed type depends on the widened function type, not
+ // the function type itself. A treetransform may keep the function type the
+ // same but its widened type might change.
+
+ override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(implicit ctx: Context): TypeApply =
+ ta.assignType(untpd.cpy.TypeApply(tree)(fun, args), fun, args)
+ // Same remark as for Apply
+
+ override def Literal(tree: Tree)(const: Constant)(implicit ctx: Context): Literal =
+ ta.assignType(untpd.cpy.Literal(tree)(const))
+
+ override def New(tree: Tree)(tpt: Tree)(implicit ctx: Context): New =
+ ta.assignType(untpd.cpy.New(tree)(tpt), tpt)
+
+ override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed =
+ ta.assignType(untpd.cpy.Typed(tree)(expr, tpt), tpt)
+
+ override def NamedArg(tree: Tree)(name: Name, arg: Tree)(implicit ctx: Context): NamedArg =
+ ta.assignType(untpd.cpy.NamedArg(tree)(name, arg), arg)
+
+ override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(implicit ctx: Context): Assign =
+ ta.assignType(untpd.cpy.Assign(tree)(lhs, rhs))
+
+ override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(implicit ctx: Context): Block = {
+ val tree1 = untpd.cpy.Block(tree)(stats, expr)
+ tree match {
+ case tree: Block if expr.tpe eq tree.expr.tpe => tree1.withTypeUnchecked(tree.tpe)
+ case _ => ta.assignType(tree1, stats, expr)
+ }
+ }
+
+ override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(implicit ctx: Context): If = {
+ val tree1 = untpd.cpy.If(tree)(cond, thenp, elsep)
+ tree match {
+ case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) => tree1.withTypeUnchecked(tree.tpe)
+ case _ => ta.assignType(tree1, thenp, elsep)
+ }
+ }
+
+ override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(implicit ctx: Context): Closure =
+ ta.assignType(untpd.cpy.Closure(tree)(env, meth, tpt), meth, tpt)
+ // Same remark as for Apply
+
+ override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = {
+ val tree1 = untpd.cpy.Match(tree)(selector, cases)
+ tree match {
+ case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe)
+ case _ => ta.assignType(tree1, cases)
+ }
+ }
+
+ override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(implicit ctx: Context): CaseDef = {
+ val tree1 = untpd.cpy.CaseDef(tree)(pat, guard, body)
+ tree match {
+ case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe)
+ case _ => ta.assignType(tree1, body)
+ }
+ }
+
+ override def Return(tree: Tree)(expr: Tree, from: Tree)(implicit ctx: Context): Return =
+ ta.assignType(untpd.cpy.Return(tree)(expr, from))
+
+ override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit ctx: Context): Try = {
+ val tree1 = untpd.cpy.Try(tree)(expr, cases, finalizer)
+ tree match {
+ case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe)
+ case _ => ta.assignType(tree1, expr, cases)
+ }
+ }
+
+ override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): SeqLiteral = {
+ val tree1 = untpd.cpy.SeqLiteral(tree)(elems, elemtpt)
+ tree match {
+ case tree: SeqLiteral
+ if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) =>
+ tree1.withTypeUnchecked(tree.tpe)
+ case _ =>
+ ta.assignType(tree1, elems, elemtpt)
+ }
+ }
+
+ override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(implicit ctx: Context): Annotated = {
+ val tree1 = untpd.cpy.Annotated(tree)(arg, annot)
+ tree match {
+ case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe)
+ case _ => ta.assignType(tree1, arg, annot)
+ }
+ }
+
+ // Overloads on the precise node types, forwarding to the `Tree`-typed versions above.
+ override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(implicit ctx: Context): If =
+ If(tree: Tree)(cond, thenp, elsep)
+ override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(implicit ctx: Context): Closure =
+ Closure(tree: Tree)(env, meth, tpt)
+ override def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(implicit ctx: Context): CaseDef =
+ CaseDef(tree: Tree)(pat, guard, body)
+ override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(implicit ctx: Context): Try =
+ Try(tree: Tree)(expr, cases, finalizer)
+ }
+
+ /** Extension methods on typed trees: classification predicates, folds,
+ * owner-changing substitutions, and builders for selections and applications.
+ */
+ implicit class TreeOps[ThisTree <: tpd.Tree](val tree: ThisTree) extends AnyVal {
+
+ def isValue(implicit ctx: Context): Boolean =
+ tree.isTerm && tree.tpe.widen.isValueType
+
+ def isValueOrPattern(implicit ctx: Context) =
+ tree.isValue || tree.isPattern
+
+ def isValueType: Boolean =
+ tree.isType && tree.tpe.isValueType
+
+ /** Is this tree a `new C(...)` constructor call? */
+ def isInstantiation: Boolean = tree match {
+ case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
+ case _ => false
+ }
+
+ def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(implicit ctx: Context) =
+ new ShallowFolder(op).apply(z, tree)
+
+ def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(implicit ctx: Context) =
+ new DeepFolder(op).apply(z, tree)
+
+ def find[T](pred: (tpd.Tree) => Boolean)(implicit ctx: Context): Option[tpd.Tree] =
+ shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum)
+
+ def subst(from: List[Symbol], to: List[Symbol])(implicit ctx: Context): ThisTree =
+ new TreeTypeMap(substFrom = from, substTo = to).apply(tree)
+
+ /** Change owner from `from` to `to`. If `from` is a weak owner, also change its
+ * owner to `to`, and continue until a non-weak owner is reached.
+ */
+ def changeOwner(from: Symbol, to: Symbol)(implicit ctx: Context): ThisTree = {
+ def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = {
+ if (from.isWeakOwner && !from.owner.isClass)
+ loop(from.owner, from :: froms, to :: tos)
+ else {
+ //println(i"change owner ${from :: froms}%, % ==> $tos of $tree")
+ new TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree)
+ }
+ }
+ loop(from, Nil, to :: Nil)
+ }
+
+ /** After phase `trans`, set the owner of every definition in this tree that was formerly
+ * owner by `from` to `to`.
+ */
+ def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(implicit ctx: Context): ThisTree = {
+ assert(ctx.phase == trans.next)
+ val traverser = new TreeTraverser {
+ def traverse(tree: Tree)(implicit ctx: Context) = tree match {
+ case tree: DefTree =>
+ val sym = tree.symbol
+ val prevDenot = sym.denot(ctx.withPhase(trans))
+ if (prevDenot.owner == from) {
+ val d = sym.copySymDenotation(owner = to)
+ d.installAfter(trans)
+ d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d)
+ }
+ // Only weak owners can contain definitions owned by `from` further down.
+ if (sym.isWeakOwner) traverseChildren(tree)
+ case _ =>
+ traverseChildren(tree)
+ }
+ }
+ traverser.traverse(tree)
+ tree
+ }
+
+ /** A select node with the given selector name and a computed type */
+ def select(name: Name)(implicit ctx: Context): Select =
+ Select(tree, name)
+
+ /** A select node with the given type */
+ def select(tp: NamedType)(implicit ctx: Context): Select =
+ untpd.Select(tree, tp.name).withType(tp)
+
+ /** A select node that selects the given symbol. Note: Need to make sure this
+ * is in fact the symbol you would get when you select with the symbol's name,
+ * otherwise a data race may occur which would be flagged by -Yno-double-bindings.
+ */
+ def select(sym: Symbol)(implicit ctx: Context): Select = {
+ val tp =
+ if (sym.isType)
+ TypeRef(tree.tpe, sym.name.asTypeName)
+ else
+ TermRef.withSigAndDenot(tree.tpe, sym.name.asTermName,
+ sym.signature, sym.denot.asSeenFrom(tree.tpe))
+ untpd.Select(tree, sym.name)
+ .withType(tp)
+ }
+
+ /** A select node with the given selector name and signature and a computed type */
+ def selectWithSig(name: Name, sig: Signature)(implicit ctx: Context): Tree =
+ untpd.SelectWithSig(tree, name, sig)
+ .withType(TermRef.withSig(tree.tpe, name.asTermName, sig))
+
+ /** A select node with selector name and signature taken from `sym`.
+ * Note: Use this method instead of select(sym) if the referenced symbol
+ * might be overridden in the type of the qualifier prefix. See note
+ * on select(sym: Symbol).
+ */
+ def selectWithSig(sym: Symbol)(implicit ctx: Context): Tree =
+ selectWithSig(sym.name, sym.signature)
+
+ /** A unary apply node with given argument: `tree(arg)` */
+ def appliedTo(arg: Tree)(implicit ctx: Context): Tree =
+ appliedToArgs(arg :: Nil)
+
+ /** An apply node with given arguments: `tree(arg, args0, ..., argsN)` */
+ def appliedTo(arg: Tree, args: Tree*)(implicit ctx: Context): Tree =
+ appliedToArgs(arg :: args.toList)
+
+ /** An apply node with given argument list `tree(args(0), ..., args(args.length - 1))` */
+ def appliedToArgs(args: List[Tree])(implicit ctx: Context): Apply =
+ Apply(tree, args)
+
+ /** The current tree applied to given argument lists:
+ * `tree (argss(0)) ... (argss(argss.length -1))`
+ */
+ def appliedToArgss(argss: List[List[Tree]])(implicit ctx: Context): Tree =
+ ((tree: Tree) /: argss)(Apply(_, _))
+
+ /** The current tree applied to (): `tree()` */
+ def appliedToNone(implicit ctx: Context): Apply = appliedToArgs(Nil)
+
+ /** The current tree applied to given type argument: `tree[targ]` */
+ def appliedToType(targ: Type)(implicit ctx: Context): Tree =
+ appliedToTypes(targ :: Nil)
+
+ /** The current tree applied to given type arguments: `tree[targ0, ..., targN]` */
+ def appliedToTypes(targs: List[Type])(implicit ctx: Context): Tree =
+ appliedToTypeTrees(targs map (TypeTree(_)))
+
+ /** The current tree applied to given type argument list: `tree[targs(0), ..., targs(targs.length - 1)]` */
+ def appliedToTypeTrees(targs: List[Tree])(implicit ctx: Context): Tree =
+ if (targs.isEmpty) tree else TypeApply(tree, targs)
+
+ /** Apply to `()` unless tree's widened type is parameterless */
+ def ensureApplied(implicit ctx: Context): Tree =
+ if (tree.tpe.widen.isParameterless) tree else tree.appliedToNone
+
+ /** `tree.isInstanceOf[tp]` */
+ def isInstance(tp: Type)(implicit ctx: Context): Tree =
+ tree.select(defn.Any_isInstanceOf).appliedToType(tp)
+
+ /** tree.asInstanceOf[`tp`] */
+ def asInstance(tp: Type)(implicit ctx: Context): Tree = {
+ assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]")
+ tree.select(defn.Any_asInstanceOf).appliedToType(tp)
+ }
+
+ /** `tree.asInstanceOf[tp]` (or its box/unbox/cast equivalent when after
+ * erasure and value and non-value types are mixed),
+ * unless tree's type already conforms to `tp`.
+ */
+ def ensureConforms(tp: Type)(implicit ctx: Context): Tree =
+ if (tree.tpe <:< tp) tree
+ else if (!ctx.erasedTypes) asInstance(tp)
+ else Erasure.Boxing.adaptToType(tree, tp)
+
+ /** If inititializer tree is `_', the default value of its type,
+ * otherwise the tree itself.
+ */
+ def wildcardToDefault(implicit ctx: Context) =
+ if (isWildcardArg(tree)) defaultValue(tree.tpe) else tree
+
+ /** `this && that`, for boolean trees `this`, `that` */
+ def and(that: Tree)(implicit ctx: Context): Tree =
+ tree.select(defn.Boolean_&&).appliedTo(that)
+
+ /** `this || that`, for boolean trees `this`, `that` */
+ def or(that: Tree)(implicit ctx: Context): Tree =
+ tree.select(defn.Boolean_||).appliedTo(that)
+
+ /** The translation of `tree = rhs`.
+ * This is either the tree as an assignment, or a setter call.
+ */
+ def becomes(rhs: Tree)(implicit ctx: Context): Tree =
+ if (tree.symbol is Method) {
+ val setr = tree match {
+ case Ident(_) =>
+ val setter = tree.symbol.setter
+ assert(setter.exists, tree.symbol.showLocated)
+ ref(tree.symbol.setter)
+ case Select(qual, _) => qual.select(tree.symbol.setter)
+ }
+ setr.appliedTo(rhs)
+ }
+ else Assign(tree, rhs)
+
+ // --- Higher order traversal methods -------------------------------
+
+ /** Apply `f` to each subtree of this tree */
+ def foreachSubTree(f: Tree => Unit)(implicit ctx: Context): Unit = {
+ val traverser = new TreeTraverser {
+ def traverse(tree: Tree)(implicit ctx: Context) = foldOver(f(tree), tree)
+ }
+ traverser.traverse(tree)
+ }
+
+ /** Is there a subtree of this tree that satisfies predicate `p`? */
+ def existsSubTree(p: Tree => Boolean)(implicit ctx: Context): Boolean = {
+ val acc = new TreeAccumulator[Boolean] {
+ def apply(x: Boolean, t: Tree)(implicit ctx: Context) = x || p(t) || foldOver(x, t)
+ }
+ acc(false, tree)
+ }
+
+ /** All subtrees of this tree that satisfy predicate `p`. */
+ def filterSubTrees(f: Tree => Boolean)(implicit ctx: Context): List[Tree] = {
+ val buf = new mutable.ListBuffer[Tree]
+ foreachSubTree { tree => if (f(tree)) buf += tree }
+ buf.toList
+ }
+ }
+
+ implicit class ListOfTreeDecorator(val xs: List[tpd.Tree]) extends AnyVal {
+ def tpes: List[Type] = xs map (_.tpe)
+ }
+
+ // convert a numeric with a toXXX method
+ def primitiveConversion(tree: Tree, numericCls: Symbol)(implicit ctx: Context): Tree = {
+ val mname = ("to" + numericCls.name).toTermName
+ val conversion = tree.tpe member mname
+ if (conversion.symbol.exists)
+ tree.select(conversion.symbol.termRef).ensureApplied
+ else if (tree.tpe.widen isRef numericCls)
+ tree
+ else {
+ ctx.warning(i"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.")
+ Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)) withPos tree.pos
+ }
+ }
+
+ /** A tree that represents the class of the erasure of type `tp`. */
+ def clsOf(tp: Type)(implicit ctx: Context): Tree = {
+ def TYPE(module: TermSymbol) = ref(module).select(nme.TYPE_)
+ defn.scalaClassName(tp) match {
+ case tpnme.Boolean => TYPE(defn.BoxedBooleanModule)
+ case tpnme.Byte => TYPE(defn.BoxedByteModule)
+ case tpnme.Short => TYPE(defn.BoxedShortModule)
+ case tpnme.Char => TYPE(defn.BoxedCharModule)
+ case tpnme.Int => TYPE(defn.BoxedIntModule)
+ case tpnme.Long => TYPE(defn.BoxedLongModule)
+ case tpnme.Float => TYPE(defn.BoxedFloatModule)
+ case tpnme.Double => TYPE(defn.BoxedDoubleModule)
+ case tpnme.Unit => TYPE(defn.BoxedUnitModule)
+ case _ =>
+ if(ctx.erasedTypes || !tp.derivesFrom(defn.ArrayClass))
+ Literal(Constant(TypeErasure.erasure(tp)))
+ else Literal(Constant(tp))
+ }
+ }
+
+ def applyOverloaded(receiver: Tree, method: TermName, args: List[Tree], targs: List[Type], expectedType: Type, isAnnotConstructor: Boolean = false)(implicit ctx: Context): Tree = {
+ val typer = ctx.typer
+ val proto = new FunProtoTyped(args, expectedType, typer)
+ val denot = receiver.tpe.member(method)
+ assert(denot.exists, i"no member $receiver . $method, members = ${receiver.tpe.decls}")
+ val selected =
+ if (denot.isOverloaded) {
+ def typeParamCount(tp: Type) = tp.widen match {
+ case tp: PolyType => tp.paramBounds.length
+ case _ => 0
+ }
+ var allAlts = denot.alternatives
+ .map(_.termRef).filter(tr => typeParamCount(tr) == targs.length)
+ if (targs.isEmpty) allAlts = allAlts.filterNot(_.widen.isInstanceOf[PolyType])
+ val alternatives = ctx.typer.resolveOverloaded(allAlts, proto)
+ assert(alternatives.size == 1,
+ i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " +
+ i"$method on ${receiver.tpe.widenDealias} with targs: $targs%, %; args: $args%, % of types ${args.tpes}%, %; expectedType: $expectedType." +
+ i" isAnnotConstructor = $isAnnotConstructor.\n" +
+ i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" +
+ i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. there's nothing user can do about it
+ alternatives.head
+ }
+ else denot.asSingleDenotation.termRef
+ val fun = receiver
+ .select(TermRef.withSig(receiver.tpe, selected.termSymbol.asTerm))
+ .appliedToTypes(targs)
+
+ def adaptLastArg(lastParam: Tree, expectedType: Type) = {
+ if (isAnnotConstructor && !(lastParam.tpe <:< expectedType)) {
+ val defn = ctx.definitions
+ val prefix = args.take(selected.widen.paramTypess.head.size - 1)
+ expectedType match {
+ case defn.ArrayOf(el) =>
+ lastParam.tpe match {
+ case defn.ArrayOf(el2) if el2 <:< el =>
+ // we have a JavaSeqLiteral with a more precise type
+              // we cannot construct a tree as JavaSeqLiteral inferred to the precise type
+              // if we add a Typed node then it would be both type-correct and
+ // will pass Ycheck
+ prefix ::: List(tpd.Typed(lastParam, TypeTree(defn.ArrayOf(el))))
+ case _ =>
+ ???
+ }
+ case _ => args
+ }
+ } else args
+ }
+
+ val callArgs: List[Tree] = if (args.isEmpty) Nil else {
+ val expectedType = selected.widen.paramTypess.head.last
+ val lastParam = args.last
+ adaptLastArg(lastParam, expectedType)
+ }
+
+ val apply = untpd.Apply(fun, callArgs)
+ new typer.ApplyToTyped(apply, fun, selected, callArgs, expectedType).result.asInstanceOf[Tree] // needed to handle varargs
+ }
+
+ @tailrec
+ def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = {
+ if (trees.isEmpty) trees.isEmpty
+ else if (trees1.isEmpty) trees.isEmpty
+ else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail)
+ }
+
+ def evalOnce(tree: Tree)(within: Tree => Tree)(implicit ctx: Context) = {
+ if (isIdempotentExpr(tree)) within(tree)
+ else {
+ val vdef = SyntheticValDef(ctx.freshName("ev$").toTermName, tree)
+ Block(vdef :: Nil, within(Ident(vdef.namedType)))
+ }
+ }
+
+ def runtimeCall(name: TermName, args: List[Tree])(implicit ctx: Context): Tree = {
+ Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToArgs(args)
+ }
+
+ /** An extractor that pulls out type arguments */
+ object MaybePoly {
+ def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
+ case TypeApply(tree, targs) => Some(tree, targs)
+ case _ => Some(tree, Nil)
+ }
+ }
+
+ /** A traverser that passes the enclosing class or method as an argument
+ * to the traverse method.
+ */
+ abstract class EnclosingMethodTraverser extends TreeAccumulator[Symbol] {
+ def traverse(enclMeth: Symbol, tree: Tree)(implicit ctx: Context): Unit
+ def apply(enclMeth: Symbol, tree: Tree)(implicit ctx: Context) = {
+ tree match {
+ case _: DefTree if tree.symbol.exists =>
+ traverse(tree.symbol.enclosingMethod, tree)
+ case _ =>
+ traverse(enclMeth, tree)
+ }
+ enclMeth
+ }
+ }
+
+ /** A key to be used in a context property that tracks enclosing inlined calls */
+ private val InlinedCalls = new Property.Key[List[Tree]]
+
+  /** A context derived from `ctx` that records `call` as the innermost enclosing
+ * call for which the inlined version is currently processed.
+ */
+ def inlineContext(call: Tree)(implicit ctx: Context): Context =
+ ctx.fresh.setProperty(InlinedCalls, call :: enclosingInlineds)
+
+ /** All enclosing calls that are currently inlined, from innermost to outermost */
+ def enclosingInlineds(implicit ctx: Context): List[Tree] =
+ ctx.property(InlinedCalls).getOrElse(Nil)
+
+ /** The source file where the symbol of the `@inline` method referred to by `call`
+ * is defined
+ */
+ def sourceFile(call: Tree)(implicit ctx: Context) = {
+ val file = call.symbol.sourceFile
+ val encoding = ctx.settings.encoding.value
+ if (file != null && file.exists) new SourceFile(file, Codec(encoding)) else NoSource
+ }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala
new file mode 100644
index 000000000..6c5210287
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala
@@ -0,0 +1,562 @@
+package dotty.tools
+package dotc
+package ast
+
+import core._
+import util.Positions._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._
+import Denotations._, SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
+import Decorators._
+import util.Property
+import language.higherKinds
+import collection.mutable.ListBuffer
+
+object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
+
+ // ----- Tree cases that exist in untyped form only ------------------
+
+ trait OpTree extends Tree {
+ def op: Name
+ override def isTerm = op.isTermName
+ override def isType = op.isTypeName
+ }
+
+  /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice
+ * @param owner The current owner at the time the tree was defined
+ */
+ abstract case class TypedSplice(tree: tpd.Tree)(val owner: Symbol) extends ProxyTree {
+ def forwardTo = tree
+ }
+
+ object TypedSplice {
+ def apply(tree: tpd.Tree)(implicit ctx: Context): TypedSplice =
+ new TypedSplice(tree)(ctx.owner) {}
+ }
+
+ /** mods object name impl */
+ case class ModuleDef(name: TermName, impl: Template)
+ extends MemberDef {
+ type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef
+ def withName(name: Name)(implicit ctx: Context) = cpy.ModuleDef(this)(name.toTermName, impl)
+ }
+
+ case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree) extends TermTree
+
+ case class SymbolLit(str: String) extends TermTree
+
+ /** An interpolated string
+   *  @param segments  a list of two-element pairs consisting of a string literal and an argument tree,
+ * possibly with a simple string literal as last element of the list
+ */
+ case class InterpolatedString(id: TermName, segments: List[Tree]) extends TermTree
+
+ case class Function(args: List[Tree], body: Tree) extends Tree {
+ override def isTerm = body.isTerm
+ override def isType = body.isType
+ }
+ /** A function created from a wildcard expression
+   *  @param placeholderParams  a list of definitions of synthetic parameters
+ * @param body the function body where wildcards are replaced by
+ * references to synthetic parameters.
+ */
+ class WildcardFunction(placeholderParams: List[ValDef], body: Tree) extends Function(placeholderParams, body)
+
+ case class InfixOp(left: Tree, op: Name, right: Tree) extends OpTree
+ case class PostfixOp(od: Tree, op: Name) extends OpTree
+ case class PrefixOp(op: Name, od: Tree) extends OpTree
+ case class Parens(t: Tree) extends ProxyTree {
+ def forwardTo = t
+ }
+ case class Tuple(trees: List[Tree]) extends Tree {
+ override def isTerm = trees.isEmpty || trees.head.isTerm
+ override def isType = !isTerm
+ }
+ case class Throw(expr: Tree) extends TermTree
+ case class WhileDo(cond: Tree, body: Tree) extends TermTree
+ case class DoWhile(body: Tree, cond: Tree) extends TermTree
+ case class ForYield(enums: List[Tree], expr: Tree) extends TermTree
+ case class ForDo(enums: List[Tree], body: Tree) extends TermTree
+ case class GenFrom(pat: Tree, expr: Tree) extends Tree
+ case class GenAlias(pat: Tree, expr: Tree) extends Tree
+ case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree]) extends TypTree
+ case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree) extends DefTree
+
+ @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY) with WithoutTypeOrPos[Untyped] {
+ override def isEmpty = true
+ }
+
+ /** A block arising from a right-associative infix operation, where, e.g.
+ *
+ * a +: b
+ *
+ * is expanded to
+ *
+ * { val x = a; b.+:(x) }
+ */
+ class InfixOpBlock(leftOperand: Tree, rightOp: Tree) extends Block(leftOperand :: Nil, rightOp)
+
+ // ----- Modifiers -----------------------------------------------------
+  /** Mod is intended to record syntactic information about modifiers; it is
+   *  NOT a replacement for FlagSet.
+ *
+ * For any query about semantic information, check `flags` instead.
+ */
+ sealed abstract class Mod(val flags: FlagSet) extends Positioned
+
+ object Mod {
+ case class Private() extends Mod(Flags.Private)
+
+ case class Protected() extends Mod(Flags.Protected)
+
+ case class Val() extends Mod(Flags.EmptyFlags)
+
+ case class Var() extends Mod(Flags.Mutable)
+
+ case class Implicit(flag: FlagSet = Flags.ImplicitCommon) extends Mod(flag)
+
+ case class Final() extends Mod(Flags.Final)
+
+ case class Sealed() extends Mod(Flags.Sealed)
+
+ case class Override() extends Mod(Flags.Override)
+
+ case class Abstract() extends Mod(Flags.Abstract)
+
+ case class Lazy() extends Mod(Flags.Lazy)
+
+ case class Inline() extends Mod(Flags.Inline)
+
+ case class Type() extends Mod(Flags.EmptyFlags)
+ }
+
+ /** Modifiers and annotations for definitions
+ *
+ * @param flags The set flags
+   *  @param privateWithin  If a private or protected modifier is followed by a
+   *                        qualifier [q], the name q; the empty typename otherwise.
+ * @param annotations The annotations preceding the modifiers
+ */
+ case class Modifiers (
+ flags: FlagSet = EmptyFlags,
+ privateWithin: TypeName = tpnme.EMPTY,
+ annotations: List[Tree] = Nil,
+ mods: List[Mod] = Nil) extends Positioned with Cloneable {
+
+ def is(fs: FlagSet): Boolean = flags is fs
+ def is(fc: FlagConjunction): Boolean = flags is fc
+
+ def | (fs: FlagSet): Modifiers = withFlags(flags | fs)
+ def & (fs: FlagSet): Modifiers = withFlags(flags & fs)
+ def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs)
+
+ def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags)
+ def toTermFlags: Modifiers = withFlags(flags.toTermFlags)
+
+ def withFlags(flags: FlagSet) =
+ if (this.flags == flags) this
+ else copy(flags = flags)
+
+ def withAddedMod(mod: Mod): Modifiers =
+ if (mods.exists(_ eq mod)) this
+ else withMods(mods :+ mod)
+
+ def withMods(ms: List[Mod]): Modifiers =
+ if (mods eq ms) this
+ else copy(mods = ms)
+
+ def withAddedAnnotation(annot: Tree): Modifiers =
+ if (annotations.exists(_ eq annot)) this
+ else withAnnotations(annotations :+ annot)
+
+ def withAnnotations(annots: List[Tree]): Modifiers =
+ if (annots eq annotations) this
+ else copy(annotations = annots)
+
+ def withPrivateWithin(pw: TypeName) =
+ if (pw.isEmpty) this
+ else copy(privateWithin = pw)
+
+ def hasFlags = flags != EmptyFlags
+ def hasAnnotations = annotations.nonEmpty
+ def hasPrivateWithin = privateWithin != tpnme.EMPTY
+ }
+
+ @sharable val EmptyModifiers: Modifiers = new Modifiers()
+
+ // ----- TypeTrees that refer to other tree's symbols -------------------
+
+ /** A type tree that gets its type from some other tree's symbol. Enters the
+ * type tree in the References attachment of the `from` tree as a side effect.
+ */
+ abstract class DerivedTypeTree extends TypeTree {
+
+ private var myWatched: Tree = EmptyTree
+
+ /** The watched tree; used only for printing */
+ def watched: Tree = myWatched
+
+ /** Install the derived type tree as a dependency on `original` */
+ def watching(original: DefTree): this.type = {
+ myWatched = original
+ val existing = original.attachmentOrElse(References, Nil)
+ original.putAttachment(References, this :: existing)
+ this
+ }
+
+ /** A hook to ensure that all necessary symbols are completed so that
+ * OriginalSymbol attachments are propagated to this tree
+ */
+ def ensureCompletions(implicit ctx: Context): Unit = ()
+
+ /** The method that computes the type of this tree */
+ def derivedType(originalSym: Symbol)(implicit ctx: Context): Type
+ }
+
+ /** Property key containing TypeTrees whose type is computed
+ * from the symbol in this type. These type trees have marker trees
+ * TypeRefOfSym or InfoOfSym as their originals.
+ */
+ val References = new Property.Key[List[Tree]]
+
+ /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym
+ * which contains the symbol of the original tree from which this
+ * TypeTree is derived.
+ */
+ val OriginalSymbol = new Property.Key[Symbol]
+
+ // ------ Creation methods for untyped only -----------------
+
+ def Ident(name: Name): Ident = new Ident(name)
+ def BackquotedIdent(name: Name): BackquotedIdent = new BackquotedIdent(name)
+ def Select(qualifier: Tree, name: Name): Select = new Select(qualifier, name)
+ def SelectWithSig(qualifier: Tree, name: Name, sig: Signature): Select = new SelectWithSig(qualifier, name, sig)
+ def This(qual: Ident): This = new This(qual)
+ def Super(qual: Tree, mix: Ident): Super = new Super(qual, mix)
+ def Apply(fun: Tree, args: List[Tree]): Apply = new Apply(fun, args)
+ def TypeApply(fun: Tree, args: List[Tree]): TypeApply = new TypeApply(fun, args)
+ def Literal(const: Constant): Literal = new Literal(const)
+ def New(tpt: Tree): New = new New(tpt)
+ def Typed(expr: Tree, tpt: Tree): Typed = new Typed(expr, tpt)
+ def NamedArg(name: Name, arg: Tree): NamedArg = new NamedArg(name, arg)
+ def Assign(lhs: Tree, rhs: Tree): Assign = new Assign(lhs, rhs)
+ def Block(stats: List[Tree], expr: Tree): Block = new Block(stats, expr)
+ def If(cond: Tree, thenp: Tree, elsep: Tree): If = new If(cond, thenp, elsep)
+ def Closure(env: List[Tree], meth: Tree, tpt: Tree): Closure = new Closure(env, meth, tpt)
+ def Match(selector: Tree, cases: List[CaseDef]): Match = new Match(selector, cases)
+ def CaseDef(pat: Tree, guard: Tree, body: Tree): CaseDef = new CaseDef(pat, guard, body)
+ def Return(expr: Tree, from: Tree): Return = new Return(expr, from)
+ def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree): Try = new Try(expr, cases, finalizer)
+ def SeqLiteral(elems: List[Tree], elemtpt: Tree): SeqLiteral = new SeqLiteral(elems, elemtpt)
+ def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt)
+ def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree): Inlined = new Inlined(call, bindings, expansion)
+ def TypeTree() = new TypeTree()
+ def SingletonTypeTree(ref: Tree): SingletonTypeTree = new SingletonTypeTree(ref)
+ def AndTypeTree(left: Tree, right: Tree): AndTypeTree = new AndTypeTree(left, right)
+ def OrTypeTree(left: Tree, right: Tree): OrTypeTree = new OrTypeTree(left, right)
+ def RefinedTypeTree(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = new RefinedTypeTree(tpt, refinements)
+ def AppliedTypeTree(tpt: Tree, args: List[Tree]): AppliedTypeTree = new AppliedTypeTree(tpt, args)
+ def PolyTypeTree(tparams: List[TypeDef], body: Tree): PolyTypeTree = new PolyTypeTree(tparams, body)
+ def ByNameTypeTree(result: Tree): ByNameTypeTree = new ByNameTypeTree(result)
+ def TypeBoundsTree(lo: Tree, hi: Tree): TypeBoundsTree = new TypeBoundsTree(lo, hi)
+ def Bind(name: Name, body: Tree): Bind = new Bind(name, body)
+ def Alternative(trees: List[Tree]): Alternative = new Alternative(trees)
+ def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree]): UnApply = new UnApply(fun, implicits, patterns)
+ def ValDef(name: TermName, tpt: Tree, rhs: LazyTree): ValDef = new ValDef(name, tpt, rhs)
+ def DefDef(name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: LazyTree): DefDef = new DefDef(name, tparams, vparamss, tpt, rhs)
+ def TypeDef(name: TypeName, rhs: Tree): TypeDef = new TypeDef(name, rhs)
+ def Template(constr: DefDef, parents: List[Tree], self: ValDef, body: LazyTreeList): Template = new Template(constr, parents, self, body)
+ def Import(expr: Tree, selectors: List[untpd.Tree]): Import = new Import(expr, selectors)
+ def PackageDef(pid: RefTree, stats: List[Tree]): PackageDef = new PackageDef(pid, stats)
+ def Annotated(arg: Tree, annot: Tree): Annotated = new Annotated(arg, annot)
+
+ // ------ Additional creation methods for untyped only -----------------
+
+ // def TypeTree(tpe: Type): TypeTree = TypeTree().withType(tpe) todo: move to untpd/tpd
+
+ /** new pre.C[Ts](args1)...(args_n)
+ * ==>
+ * (new pre.C).<init>[Ts](args1)...(args_n)
+ */
+ def New(tpt: Tree, argss: List[List[Tree]])(implicit ctx: Context): Tree = {
+ val (tycon, targs) = tpt match {
+ case AppliedTypeTree(tycon, targs) =>
+ (tycon, targs)
+ case TypedSplice(AppliedTypeTree(tycon, targs)) =>
+ (TypedSplice(tycon), targs map (TypedSplice(_)))
+ case TypedSplice(tpt1: Tree) =>
+ val argTypes = tpt1.tpe.argTypesLo
+ val tycon = tpt1.tpe.withoutArgs(argTypes)
+ def wrap(tpe: Type) = TypeTree(tpe) withPos tpt.pos
+ (wrap(tycon), argTypes map wrap)
+ case _ =>
+ (tpt, Nil)
+ }
+ var prefix: Tree = Select(New(tycon), nme.CONSTRUCTOR)
+ if (targs.nonEmpty) prefix = TypeApply(prefix, targs)
+ ensureApplied((prefix /: argss)(Apply(_, _)))
+ }
+
+ def Block(stat: Tree, expr: Tree): Block =
+ Block(stat :: Nil, expr)
+
+ def Apply(fn: Tree, arg: Tree): Apply =
+ Apply(fn, arg :: Nil)
+
+ def ensureApplied(tpt: Tree) = tpt match {
+ case _: Apply => tpt
+ case _ => Apply(tpt, Nil)
+ }
+
+ def AppliedTypeTree(tpt: Tree, arg: Tree): AppliedTypeTree =
+ AppliedTypeTree(tpt, arg :: Nil)
+
+ def TypeTree(tpe: Type)(implicit ctx: Context): TypedSplice = TypedSplice(TypeTree().withTypeUnchecked(tpe))
+
+ def unitLiteral = Literal(Constant(()))
+
+ def ref(tp: NamedType)(implicit ctx: Context): Tree =
+ TypedSplice(tpd.ref(tp))
+
+ def rootDot(name: Name) = Select(Ident(nme.ROOTPKG), name)
+ def scalaDot(name: Name) = Select(rootDot(nme.scala_), name)
+ def scalaUnit = scalaDot(tpnme.Unit)
+ def scalaAny = scalaDot(tpnme.Any)
+
+ def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(implicit ctx: Context): DefDef =
+ DefDef(nme.CONSTRUCTOR, tparams, vparamss, TypeTree(), rhs)
+
+ def emptyConstructor(implicit ctx: Context): DefDef =
+ makeConstructor(Nil, Nil)
+
+ def makeSelfDef(name: TermName, tpt: Tree)(implicit ctx: Context) =
+ ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal)
+
+ def makeTupleOrParens(ts: List[Tree])(implicit ctx: Context) = ts match {
+ case t :: Nil => Parens(t)
+ case _ => Tuple(ts)
+ }
+
+ def makeTuple(ts: List[Tree])(implicit ctx: Context) = ts match {
+ case t :: Nil => t
+ case _ => Tuple(ts)
+ }
+
+ def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers = EmptyModifiers)(implicit ctx: Context): ValDef =
+ ValDef(pname, tpe, EmptyTree).withMods(mods | Param)
+
+ def makeSyntheticParameter(n: Int = 1, tpt: Tree = TypeTree())(implicit ctx: Context): ValDef =
+ ValDef(nme.syntheticParamName(n), tpt, EmptyTree).withFlags(SyntheticTermParam)
+
+ def lambdaAbstract(tparams: List[TypeDef], tpt: Tree)(implicit ctx: Context) =
+ if (tparams.isEmpty) tpt else PolyTypeTree(tparams, tpt)
+
+ /** A reference to given definition. If definition is a repeated
+ * parameter, the reference will be a repeated argument.
+ */
+ def refOfDef(tree: MemberDef)(implicit ctx: Context) = tree match {
+ case ValDef(_, PostfixOp(_, nme.raw.STAR), _) => repeated(Ident(tree.name))
+ case _ => Ident(tree.name)
+ }
+
+ /** A repeated argument such as `arg: _*` */
+ def repeated(arg: Tree)(implicit ctx: Context) = Typed(arg, Ident(tpnme.WILDCARD_STAR))
+
+// ----- Accessing modifiers ----------------------------------------------------
+
+ abstract class ModsDecorator { def mods: Modifiers }
+
+ implicit class modsDeco(val mdef: MemberDef)(implicit ctx: Context) {
+ def mods = mdef.rawMods
+ }
+
+// --------- Copier/Transformer/Accumulator classes for untyped trees -----
+
+ override val cpy: UntypedTreeCopier = new UntypedTreeCopier
+
+ class UntypedTreeCopier extends TreeCopier {
+
+ def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] =
+ copied.asInstanceOf[copied.ThisTree[Untyped]]
+
+ def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = {
+ tree match {
+ case tree: MemberDef => copied.withMods(tree.rawMods)
+ case _ => copied
+ }
+ }.asInstanceOf[copied.ThisTree[Untyped]]
+
+ def ModuleDef(tree: Tree)(name: TermName, impl: Template) = tree match {
+ case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree
+ case _ => untpd.ModuleDef(name, impl).withPos(tree.pos)
+ }
+ def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree) = tree match {
+ case tree: ParsedTry
+ if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree
+ case _ => untpd.ParsedTry(expr, handler, finalizer).withPos(tree.pos)
+ }
+ def SymbolLit(tree: Tree)(str: String) = tree match {
+ case tree: SymbolLit if str == tree.str => tree
+ case _ => untpd.SymbolLit(str).withPos(tree.pos)
+ }
+ def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree]) = tree match {
+ case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree
+ case _ => untpd.InterpolatedString(id, segments).withPos(tree.pos)
+ }
+ def Function(tree: Tree)(args: List[Tree], body: Tree) = tree match {
+ case tree: Function if (args eq tree.args) && (body eq tree.body) => tree
+ case _ => untpd.Function(args, body).withPos(tree.pos)
+ }
+ def InfixOp(tree: Tree)(left: Tree, op: Name, right: Tree) = tree match {
+ case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree
+ case _ => untpd.InfixOp(left, op, right).withPos(tree.pos)
+ }
+ def PostfixOp(tree: Tree)(od: Tree, op: Name) = tree match {
+ case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree
+ case _ => untpd.PostfixOp(od, op).withPos(tree.pos)
+ }
+ def PrefixOp(tree: Tree)(op: Name, od: Tree) = tree match {
+ case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => tree
+ case _ => untpd.PrefixOp(op, od).withPos(tree.pos)
+ }
+ def Parens(tree: Tree)(t: Tree) = tree match {
+ case tree: Parens if t eq tree.t => tree
+ case _ => untpd.Parens(t).withPos(tree.pos)
+ }
+ def Tuple(tree: Tree)(trees: List[Tree]) = tree match {
+ case tree: Tuple if trees eq tree.trees => tree
+ case _ => untpd.Tuple(trees).withPos(tree.pos)
+ }
+ def Throw(tree: Tree)(expr: Tree) = tree match {
+ case tree: Throw if expr eq tree.expr => tree
+ case _ => untpd.Throw(expr).withPos(tree.pos)
+ }
+ def WhileDo(tree: Tree)(cond: Tree, body: Tree) = tree match {
+ case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => tree
+ case _ => untpd.WhileDo(cond, body).withPos(tree.pos)
+ }
+ def DoWhile(tree: Tree)(body: Tree, cond: Tree) = tree match {
+ case tree: DoWhile if (body eq tree.body) && (cond eq tree.cond) => tree
+ case _ => untpd.DoWhile(body, cond).withPos(tree.pos)
+ }
+ def ForYield(tree: Tree)(enums: List[Tree], expr: Tree) = tree match {
+ case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree
+ case _ => untpd.ForYield(enums, expr).withPos(tree.pos)
+ }
+ def ForDo(tree: Tree)(enums: List[Tree], body: Tree) = tree match {
+ case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree
+ case _ => untpd.ForDo(enums, body).withPos(tree.pos)
+ }
+ def GenFrom(tree: Tree)(pat: Tree, expr: Tree) = tree match {
+ case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) => tree
+ case _ => untpd.GenFrom(pat, expr).withPos(tree.pos)
+ }
+ def GenAlias(tree: Tree)(pat: Tree, expr: Tree) = tree match {
+ case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree
+ case _ => untpd.GenAlias(pat, expr).withPos(tree.pos)
+ }
+ def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree]) = tree match {
+ case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree
+ case _ => untpd.ContextBounds(bounds, cxBounds).withPos(tree.pos)
+ }
+ def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree) = tree match {
+ case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree
+ case _ => untpd.PatDef(mods, pats, tpt, rhs).withPos(tree.pos)
+ }
+ }
+
+ abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) {
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case ModuleDef(name, impl) =>
+ cpy.ModuleDef(tree)(name, transformSub(impl))
+ case ParsedTry(expr, handler, finalizer) =>
+ cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer))
+ case SymbolLit(str) =>
+ cpy.SymbolLit(tree)(str)
+ case InterpolatedString(id, segments) =>
+ cpy.InterpolatedString(tree)(id, transform(segments))
+ case Function(args, body) =>
+ cpy.Function(tree)(transform(args), transform(body))
+ case InfixOp(left, op, right) =>
+ cpy.InfixOp(tree)(transform(left), op, transform(right))
+ case PostfixOp(od, op) =>
+ cpy.PostfixOp(tree)(transform(od), op)
+ case PrefixOp(op, od) =>
+ cpy.PrefixOp(tree)(op, transform(od))
+ case Parens(t) =>
+ cpy.Parens(tree)(transform(t))
+ case Tuple(trees) =>
+ cpy.Tuple(tree)(transform(trees))
+ case Throw(expr) =>
+ cpy.Throw(tree)(transform(expr))
+ case WhileDo(cond, body) =>
+ cpy.WhileDo(tree)(transform(cond), transform(body))
+ case DoWhile(body, cond) =>
+ cpy.DoWhile(tree)(transform(body), transform(cond))
+ case ForYield(enums, expr) =>
+ cpy.ForYield(tree)(transform(enums), transform(expr))
+ case ForDo(enums, body) =>
+ cpy.ForDo(tree)(transform(enums), transform(body))
+ case GenFrom(pat, expr) =>
+ cpy.GenFrom(tree)(transform(pat), transform(expr))
+ case GenAlias(pat, expr) =>
+ cpy.GenAlias(tree)(transform(pat), transform(expr))
+ case ContextBounds(bounds, cxBounds) =>
+ cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds))
+ case PatDef(mods, pats, tpt, rhs) =>
+ cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs))
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+ abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] {
+ override def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = tree match {
+ case ModuleDef(name, impl) =>
+ this(x, impl)
+ case ParsedTry(expr, handler, finalizer) =>
+ this(this(this(x, expr), handler), finalizer)
+ case SymbolLit(str) =>
+ x
+ case InterpolatedString(id, segments) =>
+ this(x, segments)
+ case Function(args, body) =>
+ this(this(x, args), body)
+ case InfixOp(left, op, right) =>
+ this(this(x, left), right)
+ case PostfixOp(od, op) =>
+ this(x, od)
+ case PrefixOp(op, od) =>
+ this(x, od)
+ case Parens(t) =>
+ this(x, t)
+ case Tuple(trees) =>
+ this(x, trees)
+ case Throw(expr) =>
+ this(x, expr)
+ case WhileDo(cond, body) =>
+ this(this(x, cond), body)
+ case DoWhile(body, cond) =>
+ this(this(x, body), cond)
+ case ForYield(enums, expr) =>
+ this(this(x, enums), expr)
+ case ForDo(enums, body) =>
+ this(this(x, enums), body)
+ case GenFrom(pat, expr) =>
+ this(this(x, pat), expr)
+ case GenAlias(pat, expr) =>
+ this(this(x, pat), expr)
+ case ContextBounds(bounds, cxBounds) =>
+ this(this(x, bounds), cxBounds)
+ case PatDef(mods, pats, tpt, rhs) =>
+ this(this(this(x, pats), tpt), rhs)
+ case TypedSplice(tree) =>
+ this(x, tree)
+ case _ =>
+ super.foldOver(x, tree)
+ }
+ }
+
+ /** Fold `f` over all tree nodes, in depth-first, prefix order */
+ class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] {
+ def apply(x: X, tree: Tree)(implicit ctx: Context): X = foldOver(f(x, tree), tree)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
new file mode 100644
index 000000000..19ede3cec
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
@@ -0,0 +1,128 @@
+
+package dotty.tools.dotc
+package config
+
+import java.io.File
+import Settings._
+import core.Contexts._
+import util.DotClass
+import Properties._
+
+object CompilerCommand extends DotClass {
+
+ /** The name of the command */
+ def cmdName = "scalac"
+
+ /** Help text appended when advanced (-X/-Y) usage is requested. */
+ private def explainAdvanced = """
+ |-- Notes on option parsing --
+ |Boolean settings are always false unless set.
+ |Where multiple values are accepted, they should be comma-separated.
+ | example: -Xplugin:plugin1,plugin2
+ |<phases> means one or a comma-separated list of:
+ | - (partial) phase names with an optional "+" suffix to include the next phase
+ | - the string "all"
+ | example: -Xprint:all prints all phases.
+ | example: -Xprint:front,mixin prints the frontend and mixin phases.
+ | example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase.
+ | This is useful because during the tree transform of phase X, we often
+ | already are in phase X + 1.
+ """
+
+ def shortUsage = s"Usage: $cmdName <options> <source files>"
+
+ def versionMsg = s"Dotty compiler $versionString -- $copyrightString"
+
+ /** Distill arguments into summary detailing settings, errors and files to compile */
+ def distill(args: Array[String])(implicit ctx: Context): ArgsSummary = {
+ /**
+ * Expands all arguments starting with @ to the contents of the
+ * file named like each argument.
+ */
+ // NOTE(review): currently a stub -- any "@file" argument aborts via `unsupported`.
+ def expandArg(arg: String): List[String] = unsupported("expandArg")/*{
+ def stripComment(s: String) = s takeWhile (_ != '#')
+ val file = File(arg stripPrefix "@")
+ if (!file.exists)
+ throw new java.io.FileNotFoundException("argument file %s could not be found" format file.name)
+
+ settings splitParams (file.lines() map stripComment mkString " ")
+ }*/
+
+ // expand out @filename to the contents of that filename
+ def expandedArguments = args.toList flatMap {
+ case x if x startsWith "@" => expandArg(x)
+ case x => List(x)
+ }
+
+ ctx.settings.processArguments(expandedArguments, processAll = true)
+ }
+
+ /** Provide usage feedback on argument summary, assuming that all settings
+ * are already applied in context.
+ * @return The list of files to compile.
+ */
+ def checkUsage(summary: ArgsSummary, sourcesRequired: Boolean)(implicit ctx: Context): List[String] = {
+ val settings = ctx.settings
+
+ /** Creates a help message for a subset of options based on cond */
+ def availableOptionsMsg(cond: Setting[_] => Boolean): String = {
+ val ss = (ctx.settings.allSettings filter cond).toList sortBy (_.name)
+ // NOTE(review): `max` throws on an empty list -- assumes `cond` matches at least one setting. TODO confirm.
+ val width = (ss map (_.name.length)).max
+ def format(s: String) = ("%-" + width + "s") format s
+ def helpStr(s: Setting[_]) = s"${format(s.name)} ${s.description}"
+ ss map helpStr mkString "\n"
+ }
+
+ /** Usage header (and optional advanced notes) followed by the option list. */
+ def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting[_] => Boolean): String = {
+ val prefix = List(
+ Some(shortUsage),
+ Some(explainAdvanced) filter (_ => shouldExplain),
+ Some(label + " options include:")
+ ).flatten mkString "\n"
+
+ prefix + "\n" + availableOptionsMsg(cond)
+ }
+
+ // Option categories are keyed off the name prefix: -X advanced, -Y private.
+ def isStandard(s: Setting[_]): Boolean = !isAdvanced(s) && !isPrivate(s)
+ def isAdvanced(s: Setting[_]): Boolean = s.name startsWith "-X"
+ def isPrivate(s: Setting[_]) : Boolean = s.name startsWith "-Y"
+
+ /** Messages explaining usage and options */
+ def usageMessage = createUsageMsg("where possible standard", shouldExplain = false, isStandard)
+ def xusageMessage = createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced)
+ def yusageMessage = createUsageMsg("Possible private", shouldExplain = true, isPrivate)
+
+ /** True if any info-only flag was given; then we print and compile nothing. */
+ def shouldStopWithInfo = {
+ import settings._
+ Set(help, Xhelp, Yhelp, showPlugins, showPhases) exists (_.value)
+ }
+
+ def infoMessage: String = {
+ import settings._
+ if (help.value) usageMessage
+ else if (Xhelp.value) xusageMessage
+ else if (Yhelp.value) yusageMessage
+// else if (showPlugins.value) global.pluginDescriptions
+// else if (showPhases.value) global.phaseDescriptions + (
+// if (debug.value) "\n" + global.phaseFlagDescriptions else ""
+// )
+ else ""
+ }
+
+ // Argument errors: report them all and compile nothing.
+ if (summary.errors.nonEmpty) {
+ summary.errors foreach (ctx.error(_))
+ ctx.echo(" dotc -help gives more information")
+ Nil
+ }
+ else if (settings.version.value) {
+ ctx.echo(versionMsg)
+ Nil
+ }
+ else if (shouldStopWithInfo) {
+ ctx.echo(infoMessage)
+ Nil
+ } else {
+ if (sourcesRequired && summary.arguments.isEmpty) ctx.echo(usageMessage)
+ summary.arguments
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala
new file mode 100644
index 000000000..7744a5479
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/Config.scala
@@ -0,0 +1,138 @@
+package dotty.tools.dotc.config
+
+/** Compile-time switches and tuning constants for the compiler.
+ * All members are `final val`s, i.e. compile-time constants.
+ */
+object Config {
+
+ final val cacheMembersNamed = true
+ final val cacheAsSeenFrom = true
+ final val useFingerPrints = true // note: it currently seems to be slightly faster not to use them! my junit test: 548s without, 560s with.
+ final val cacheMemberNames = true
+ final val cacheImplicitScopes = true
+
+ final val checkCacheMembersNamed = false
+
+ /** When updating a constraint bound, check that the constrained parameter
+ * does not appear at the top-level of either of its bounds.
+ */
+ final val checkConstraintsNonCyclic = false
+
+ /** Make sure none of the bounds of a parameter in an OrderingConstraint
+ * contains this parameter at its toplevel (i.e. as an operand of a
+ * combination of &'s and |'s.). The check is performed each time a new bound
+ * is added to the constraint.
+ */
+ final val checkConstraintsSeparated = false
+
+ /** Check that each constraint resulting from a subtype test
+ * is satisfiable.
+ */
+ final val checkConstraintsSatisfiable = false
+
+ /** Check that each constraint is fully propagated. i.e.
+ * If P <: Q then the upper bound of P is a subtype of the upper bound of Q
+ * and the lower bound of Q is a subtype of the lower bound of P.
+ */
+ final val checkConstraintsPropagated = false
+
+ /** Check that constraints of globally committable typer states are closed.
+ * NOTE: When enabled, the check can cause CyclicReference errors because
+ * it traverses all elements of a type. Such failures were observed when
+ * compiling all of dotty together (source seems to be in GenBCode which
+ * accesses javac's settings.)
+ *
+ * It is recommended to turn this option on only when chasing down
+ * a PolyParam instantiation error. See comment in Types.TypeVar.instantiate.
+ */
+ final val debugCheckConstraintsClosed = false
+
+ /** Check that no type appearing as the info of a SymDenotation contains
+ * skolem types.
+ */
+ final val checkNoSkolemsInInfo = false
+
+ /** Type comparer will fail with an assert if the upper bound
+ * of a constrained parameter becomes Nothing. This should be turned
+ * on only for specific debugging as normally instantiation to Nothing
+ * is not an error condition.
+ */
+ final val failOnInstantiationToNothing = false
+
+ /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set.
+ * The reason to have an option as well as the present global switch is
+ * that the noDoubleDef checking is done in a hotspot, and we do not
+ * want to incur the overhead of checking an option each time.
+ */
+ final val checkNoDoubleBindings = true
+
+ /** Check positions for consistency after parsing */
+ final val checkPositions = true
+
+ /** Show subtype traces for all deep subtype recursions */
+ final val traceDeepSubTypeRecursions = false
+
+ /** When explaining subtypes and this flag is set, also show the classes of the compared types. */
+ final val verboseExplainSubtype = true
+
+ /** If this flag is set, take the fast path when comparing same-named type-aliases and types */
+ final val fastPathForRefinedSubtype = true
+
+ /** If this flag is set, higher-kinded applications are checked for validity
+ */
+ final val checkHKApplications = false
+
+ /** The recursion depth for showing a summarized string */
+ final val summarizeDepth = 2
+
+ /** Check that variances of lambda arguments match the
+ * variance of the underlying lambda class.
+ */
+ final val checkLambdaVariance = false
+
+ /** Check that certain types cannot be created in erasedTypes phases.
+ * Note: Turning this option on will get some false negatives, since it is
+ * possible that And/Or types are still created during erasure as the result
+ * of some operation on an existing type.
+ */
+ final val checkUnerased = false
+
+ /** In `derivedSelect`, rewrite
+ *
+ * (S & T)#A --> S#A & T#A
+ * (S | T)#A --> S#A | T#A
+ *
+ * Not sure whether this is useful. Preliminary measurements show a slowdown of about
+ * 7% for the build when this option is enabled.
+ */
+ final val splitProjections = false
+
+ /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for
+ * `[Xs] -> U` to `[Xs := Ts]U`.
+ * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite.
+ */
+ final val simplifyApplications = true
+
+ /** Initial size of superId table */
+ final val InitialSuperIdsSize = 4096
+
+ /** Initial capacity of uniques HashMap */
+ final val initialUniquesCapacity = 40000
+
+ /** How many recursive calls to NamedType#underlying are performed before logging starts. */
+ final val LogPendingUnderlyingThreshold = 50
+
+ /** How many recursive calls to isSubType are performed before logging starts. */
+ final val LogPendingSubTypesThreshold = 50
+
+ /** How many recursive calls to findMember are performed before logging names starts
+ * Note: this threshold has to be chosen carefully. Too large, and programs
+ * like tests/pos/IterableSelfRec go into polynomial (or even exponential?)
+ * compile time slowdown. Too small and normal programs will cause the compiler to
+ * do inefficient operations on findMember. The current value is determined
+ * so that (1) IterableSelfRec still compiles in reasonable time (< 10sec) (2) Compiling
+ * dotty itself only causes small pending names lists to be generated (we measured
+ * at max 6 elements) and these lists are never searched with contains.
+ */
+ final val LogPendingFindMemberThreshold = 10
+
+ /** Maximal number of outstanding recursive calls to findMember */
+ final val PendingFindMemberLimit = LogPendingFindMemberThreshold * 4
+}
diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
new file mode 100644
index 000000000..a695202d3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
@@ -0,0 +1,70 @@
+package dotty.tools
+package dotc
+package config
+
+import io.{AbstractFile,ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
+import ClassPath.{ JavaContext, DefaultJavaContext }
+import core._
+import Symbols._, Types._, Contexts._, Denotations._, SymDenotations._, StdNames._, Names._
+import Flags._, Scopes._, Decorators._, NameOps._, util.Positions._
+import transform.ExplicitOuter, transform.SymUtils._
+
+class JavaPlatform extends Platform {
+
+ // Built lazily by `classPath` on first access; replaced by `updateClassPath`.
+ private var currentClassPath: Option[MergedClassPath] = None
+
+ /** The compiler classpath, resolved on first access via a `PathResolver`. */
+ def classPath(implicit ctx: Context): ClassPath = {
+ if (currentClassPath.isEmpty)
+ currentClassPath = Some(new PathResolver().result)
+ val cp = currentClassPath.get
+ //println(cp)
+ cp
+ }
+
+ // The given symbol is a method with the right name and signature to be a runnable java program,
+ // i.e. `def main(args: Array[String]): Unit`.
+ def isJavaMainMethod(sym: SymDenotation)(implicit ctx: Context) =
+ (sym.name == nme.main) && (sym.info match {
+ case t@MethodType(_, defn.ArrayOf(el) :: Nil) => el =:= defn.StringType && (t.resultType isRef defn.UnitClass)
+ case _ => false
+ })
+
+ // The given class has a main method.
+ def hasJavaMainMethod(sym: Symbol)(implicit ctx: Context): Boolean =
+ (sym.info member nme.main).hasAltWith {
+ case x: SymDenotation => isJavaMainMethod(x)
+ case _ => false
+ }
+
+ /** Update classpath with a substituted subentry */
+ // NOTE(review): uses `currentClassPath.get` -- fails if `classPath` was never forced. TODO confirm callers guarantee this.
+ def updateClassPath(subst: Map[ClassPath, ClassPath]) =
+ currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
+
+ /** The loader for the root package, backed by the resolved classpath. */
+ def rootLoader(root: TermSymbol)(implicit ctx: Context): SymbolLoader = new ctx.base.loaders.PackageLoader(root, classPath)
+
+ /** Is the SAMType `cls` also a SAM under the rules of the JVM? */
+ def isSam(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ cls.is(NoInitsTrait) &&
+ cls.superClass == defn.ObjectClass &&
+ cls.directlyInheritedTraits.forall(_.is(NoInits)) &&
+ !ExplicitOuter.needsOuterIfReferenced(cls) &&
+ cls.typeRef.fields.isEmpty // Superaccessors already show up as abstract methods here, so no test necessary
+
+ /** We could get away with excluding BoxedBooleanClass for the
+ * purpose of equality testing since it need not compare equal
+ * to anything but other booleans, but it should be present in
+ * case this is put to other uses.
+ */
+ def isMaybeBoxed(sym: ClassSymbol)(implicit ctx: Context) = {
+ val d = defn
+ import d._
+ (sym == ObjectClass) ||
+ (sym == JavaSerializableClass) ||
+ (sym == ComparableClass) ||
+ (sym derivesFrom BoxedNumberClass) ||
+ (sym derivesFrom BoxedCharClass) ||
+ (sym derivesFrom BoxedBooleanClass)
+ }
+
+ /** Create a loader that reads symbols from the given classfile. */
+ def newClassLoader(bin: AbstractFile)(implicit ctx: Context): SymbolLoader =
+ new ClassfileLoader(bin)
+}
diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
new file mode 100644
index 000000000..a87eb9bce
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
@@ -0,0 +1,116 @@
+package dotty.tools
+package dotc
+package config
+
+import io._
+
+/** A class for holding mappings from source directories to
+ * their output location. This functionality can be accessed
+ * only programmatically. The command line compiler uses a
+ * single output location, but tools may use this functionality
+ * to set output location per source directory.
+ */
+class OutputDirs {
+ /** Pairs of source directory - destination directory. */
+ private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil
+
+ /** If this is not None, the output location where all
+ * classes should go.
+ */
+ private var singleOutDir: Option[AbstractFile] = None
+
+ /** Add a destination directory for sources found under srcdir.
+ * Both directories should exist.
+ */
+ def add(srcDir: String, outDir: String): Unit =
+ add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
+ checkDir(AbstractFile.getDirectory(outDir), outDir))
+
+ /** Check that dir exists and is a directory (or, if allowJar, a jar/zip). */
+ private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = (
+ if (dir != null && dir.isDirectory)
+ dir
+ // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false))
+ else if (allowJar && dir == null && Jar.isJarOrZip(name, false))
+ new PlainFile(Path(name))
+ else
+ throw new FatalError(name + " does not exist or is not a directory"))
+
+ /** Set the single output directory. From now on, all files will
+ * be dumped in there, regardless of previous calls to 'add'.
+ */
+ def setSingleOutput(outDir: String): Unit = {
+ val dst = AbstractFile.getDirectory(outDir)
+ setSingleOutput(checkDir(dst, outDir, true))
+ }
+
+ def getSingleOutput: Option[AbstractFile] = singleOutDir
+
+ /** Set the single output directory. From now on, all files will
+ * be dumped in there, regardless of previous calls to 'add'.
+ */
+ def setSingleOutput(dir: AbstractFile): Unit = {
+ singleOutDir = Some(dir)
+ }
+
+ // Adding a mapping clears any previously configured single output dir.
+ def add(src: AbstractFile, dst: AbstractFile): Unit = {
+ singleOutDir = None
+ outputDirs ::= ((src, dst))
+ }
+
+ /** Return the list of source-destination directory pairs. */
+ def outputs: List[(AbstractFile, AbstractFile)] = outputDirs
+
+ /** Return the output directory for the given file.
+ */
+ def outputDirFor(src: AbstractFile): AbstractFile = {
+ // Only the source side of a pair is consulted here (outDir is unused).
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ src.path.startsWith(srcDir.path)
+
+ singleOutDir match {
+ case Some(d) => d
+ case None =>
+ (outputs find (isBelow _).tupled) match {
+ case Some((_, d)) => d
+ case _ =>
+ throw new FatalError("Could not find an output directory for "
+ + src.path + " in " + outputs)
+ }
+ }
+ }
+
+ /** Return the source file path(s) which correspond to the given
+ * classfile path and SourceFile attribute value, subject to the
+ * condition that source files are arranged in the filesystem
+ * according to Java package layout conventions.
+ *
+ * The given classfile path must be contained in at least one of
+ * the specified output directories. If it does not then this
+ * method returns Nil.
+ *
+ * Note that the source file is not required to exist, so assuming
+ * a valid classfile path this method will always return a list
+ * containing at least one element.
+ *
+ * Also that if two or more source path elements target the same
+ * output directory there will be two or more candidate source file
+ * paths.
+ */
+ def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = {
+ // Only the output side of a pair is consulted here (srcDir is unused).
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ classFile.path.startsWith(outDir.path)
+
+ singleOutDir match {
+ case Some(d) =>
+ d match {
+ case _: VirtualDirectory | _: io.ZipArchive => Nil
+ case _ => List(d.lookupPathUnchecked(srcPath, false))
+ }
+ case None =>
+ (outputs filter (isBelow _).tupled) match {
+ case Nil => Nil
+ case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
new file mode 100644
index 000000000..aa4d8aeb0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
@@ -0,0 +1,281 @@
+package dotty.tools
+package dotc
+package config
+
+import java.net.{ URL, MalformedURLException }
+import WrappedProperties.AccessControl
+import io.{ ClassPath, JavaClassPath, File, Directory, Path, AbstractFile }
+import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
+import PartialFunction.condOpt
+import scala.language.postfixOps
+import core.Contexts._
+import Settings._
+
+// Loosely based on the draft specification at:
+// https://wiki.scala-lang.org/display/SW/Classpath
+
+object PathResolver {
+
+ // Imports property/environment functions which suppress
+ // security exceptions.
+ import AccessControl._
+
+ /** The first non-empty string among `xs`, or "" if there is none. */
+ def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse ""
+
+ /** Map all classpath elements to absolute paths and reconstruct the classpath.
+ */
+ def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
+
+ /** pretty print class path: one entry per line when there are several */
+ def ppcp(s: String) = split(s) match {
+ case Nil => ""
+ case Seq(x) => x
+ case xs => xs map ("\n" + _) mkString
+ }
+
+ /** Values found solely by inspecting environment or property variables.
+ */
+ object Environment {
+ // Fallback when "sun.boot.class.path" is absent: any property ending
+ // in ".boot.class.path".
+ private def searchForBootClasspath = (
+ systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
+ )
+
+ /** Environment variables which java pays attention to so it
+ * seems we do as well.
+ */
+ def classPathEnv = envOrElse("CLASSPATH", "")
+ def sourcePathEnv = envOrElse("SOURCEPATH", "")
+
+ def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
+
+ def javaExtDirs = propOrEmpty("java.ext.dirs")
+ def scalaHome = propOrEmpty("scala.home")
+ def scalaExtDirs = propOrEmpty("scala.ext.dirs")
+
+ /** The java classpath and whether to use it. */
+ def javaUserClassPath = propOrElse("java.class.path", "")
+ def useJavaClassPath = propOrFalse("scala.usejavacp")
+
+ override def toString = s"""
+ |object Environment {
+ | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath)
+ | javaBootClassPath = <${javaBootClassPath.length} chars>
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ |}""".trim.stripMargin
+ }
+
+ /** Default values based on those in Environment as interpreted according
+ * to the path resolution specification.
+ */
+ object Defaults {
+ def scalaSourcePath = Environment.sourcePathEnv
+ def javaBootClassPath = Environment.javaBootClassPath
+ def javaUserClassPath = Environment.javaUserClassPath
+ def javaExtDirs = Environment.javaExtDirs
+ def useJavaClassPath = Environment.useJavaClassPath
+
+ def scalaHome = Environment.scalaHome
+ def scalaHomeDir = Directory(scalaHome)
+ def scalaHomeExists = scalaHomeDir.isDirectory
+ def scalaLibDir = Directory(scalaHomeDir / "lib")
+ def scalaClassesDir = Directory(scalaHomeDir / "classes")
+
+ def scalaLibAsJar = File(scalaLibDir / "scala-library.jar")
+ def scalaLibAsDir = Directory(scalaClassesDir / "library")
+
+ // Prefer the packaged jar's directory; fall back to the classes directory.
+ def scalaLibDirFound: Option[Directory] =
+ if (scalaLibAsJar.isFile) Some(scalaLibDir)
+ else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir)
+ else None
+
+ def scalaLibFound =
+ if (scalaLibAsJar.isFile) scalaLibAsJar.path
+ else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
+ else ""
+
+ // XXX It must be time for someone to figure out what all these things
+ // are intended to do. This is disabled here because it was causing all
+ // the scala jars to end up on the classpath twice: one on the boot
+ // classpath as set up by the runner (or regular classpath under -nobootcp)
+ // and then again here.
+ def scalaBootClassPath = ""
+ // scalaLibDirFound match {
+ // case Some(dir) if scalaHomeExists =>
+ // val paths = ClassPath expandDir dir.path
+ // join(paths: _*)
+ // case _ => ""
+ // }
+
+ def scalaExtDirs = Environment.scalaExtDirs
+
+ def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
+
+ override def toString = """
+ |object Defaults {
+ | scalaHome = %s
+ | javaBootClassPath = %s
+ | scalaLibDirFound = %s
+ | scalaLibFound = %s
+ | scalaBootClassPath = %s
+ | scalaPluginPath = %s
+ |}""".trim.stripMargin.format(
+ scalaHome,
+ ppcp(javaBootClassPath),
+ scalaLibDirFound, scalaLibFound,
+ ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
+ )
+ }
+
+ /** Resolve a classpath from the given classpath string, using a fresh
+ * context whose `classpath` setting is updated to `path`.
+ */
+ def fromPathString(path: String)(implicit ctx: Context): JavaClassPath = {
+ val settings = ctx.settings.classpath.update(path)
+ new PathResolver()(ctx.fresh.setSettings(settings)).result
+ }
+
+ /** With no arguments, show the interesting values in Environment and Defaults.
+ * If there are arguments, show those in Calculated as if those options had been
+ * given to a scala runner.
+ */
+ def main(args: Array[String]): Unit = {
+ if (args.isEmpty) {
+ println(Environment)
+ println(Defaults)
+ }
+ else {
+ implicit val ctx: Context = (new ContextBase).initialCtx // Dotty deviation: implicits need explicit type
+ val ArgsSummary(sstate, rest, errors) =
+ ctx.settings.processArguments(args.toList, true)
+ errors.foreach(println)
+ val pr = new PathResolver()(ctx.fresh.setSettings(sstate))
+ println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
+ println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
+ pr.result.show
+ }
+ }
+}
+import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
+
+/** Assembles the compiler class path from command-line settings, falling
+ * back on `Defaults`/`Environment`. See `Calculated.basis` for the exact
+ * precedence order of the assembled entries.
+ */
+class PathResolver(implicit ctx: Context) {
+ import ctx.base.settings
+
+ val context = ClassPath.DefaultJavaContext
+
+ /** The command-line value for `name` if explicitly set and non-empty, else `alt`. */
+ private def cmdLineOrElse(name: String, alt: String) = {
+ (commandLineFor(name) match {
+ case Some("") => None
+ case x => x
+ }) getOrElse alt
+ }
+
+ /** Map a path-kind keyword to the value of the corresponding setting. */
+ private def commandLineFor(s: String): Option[String] = condOpt(s) {
+ case "javabootclasspath" => settings.javabootclasspath.value
+ case "javaextdirs" => settings.javaextdirs.value
+ case "bootclasspath" => settings.bootclasspath.value
+ case "extdirs" => settings.extdirs.value
+ case "classpath" | "cp" => settings.classpath.value
+ case "sourcepath" => settings.sourcepath.value
+ case "priorityclasspath" => settings.priorityclasspath.value
+ }
+
+ /** Calculated values based on any given command line options, falling back on
+ * those in Defaults.
+ */
+ object Calculated {
+ def scalaHome = Defaults.scalaHome
+ def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
+ def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
+ def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
+ def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
+ def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
+ def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+ def priorityClassPath = cmdLineOrElse("priorityclasspath", "")
+ /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
+ * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
+ * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
+ * [scaladoc] ^
+ * because the bootstrapping will look at the sourcepath and create package "reflect" in "<root>"
+ * and then when typing relative names, instead of picking <root>.scala.relect, typedIdentifier will pick up the
+ * <root>.reflect package created by the bootstrapping. Thus, no bootstrapping for scaladoc!
+ * TODO: we should refactor this as a separate -bootstrap option to have a clean implementation, no? */
+ def sourcePath = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath)
+
+ /** Against my better judgment, giving in to martin here and allowing
+ * CLASSPATH to be used automatically. So for the user-specified part
+ * of the classpath:
+ *
+ * - If -classpath or -cp is given, it is that
+ * - Otherwise, if CLASSPATH is set, it is that
+ * - If neither of those, then "." is used.
+ */
+ def userClassPath = {
+ if (!settings.classpath.isDefault)
+ settings.classpath.value
+ else sys.env.getOrElse("CLASSPATH", ".")
+ }
+
+ import context._
+
+ // Assemble the elements!
+ // priority class path takes precedence
+ def basis = List[Traversable[ClassPath]](
+ classesInExpandedPath(priorityClassPath), // 0. The priority class path (for testing).
+ classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
+ contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
+ classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
+ classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
+ contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
+ classesInExpandedPath(userClassPath), // 6. The Scala application class path.
+ sourcesInPath(sourcePath) // 7. The Scala source path.
+ )
+
+ // Flattened, de-duplicated entries in precedence order.
+ lazy val containers = basis.flatten.distinct
+
+ override def toString = """
+ |object Calculated {
+ | scalaHome = %s
+ | priorityClassPath = %s
+ | javaBootClassPath = %s
+ | javaExtDirs = %s
+ | javaUserClassPath = %s
+ | useJavaClassPath = %s
+ | scalaBootClassPath = %s
+ | scalaExtDirs = %s
+ | userClassPath = %s
+ | sourcePath = %s
+ |}""".trim.stripMargin.format(
+ scalaHome, ppcp(priorityClassPath),
+ ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
+ useJavaClassPath,
+ ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
+ ppcp(sourcePath)
+ )
+ }
+
+ def containers = Calculated.containers
+
+ lazy val result: JavaClassPath = {
+ // Prioritize `dotty.jar` and `dotty-lib.jar` to shadow others
+ val (dottyJars, others) =
+ containers.partition(x => x.name.contains("dotty-lib.jar") || x.name.contains("dotty.jar"))
+ // Then any jars with `dotty` in the name - putting them before scala-library
+ val (dottyCp, remaining) =
+ others.partition(_.name.contains("dotty-"))
+
+ val cp = new JavaClassPath((dottyJars ++ dottyCp ++ remaining).toIndexedSeq, context)
+
+ // Under -Ylog:classpath, dump how the classpath was put together.
+ if (settings.Ylogcp.value) {
+ Console.println("Classpath built from " + settings.toConciseString(ctx.sstate))
+ Console.println("Defaults: " + PathResolver.Defaults)
+ Console.println("Calculated: " + Calculated)
+
+ val xs = (Calculated.basis drop 2).flatten.distinct
+ println("After java boot/extdirs classpath has %d entries:" format xs.size)
+ xs foreach (x => println(" " + x))
+ }
+ cp
+ }
+
+ def asURLs = result.asURLs
+
+}
diff --git a/compiler/src/dotty/tools/dotc/config/Platform.scala b/compiler/src/dotty/tools/dotc/config/Platform.scala
new file mode 100644
index 000000000..062d9002d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/Platform.scala
@@ -0,0 +1,39 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package dotty.tools
+package dotc
+package config
+
+import io.{ClassPath, AbstractFile}
+import core.Contexts._, core.Symbols._
+import core.SymbolLoader
+
+/** The platform dependent pieces of Global.
+ */
+/** The platform dependent pieces of Global.
+ * Concrete implementations (e.g. the JVM platform) supply the classpath
+ * handling and symbol loading appropriate to the target.
+ */
+abstract class Platform {
+
+ /** The root symbol loader. */
+ def rootLoader(root: TermSymbol)(implicit ctx: Context): SymbolLoader
+
+ /** The compiler classpath. */
+ def classPath(implicit ctx: Context): ClassPath
+
+ /** Update classpath with a substitution that maps entries to entries */
+ def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit
+
+ /** Any platform-specific phases. */
+ //def platformPhases: List[SubComponent]
+
+ /** Is the SAMType `cls` also a SAM under the rules of the platform? */
+ def isSam(cls: ClassSymbol)(implicit ctx: Context): Boolean
+
+ /** The various ways a boxed primitive might materialize at runtime. */
+ def isMaybeBoxed(sym: ClassSymbol)(implicit ctx: Context): Boolean
+
+ /** Create a new class loader to load class file `bin` */
+ def newClassLoader(bin: AbstractFile)(implicit ctx: Context): SymbolLoader
+}
+
diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala
new file mode 100644
index 000000000..002d0f933
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/Printers.scala
@@ -0,0 +1,34 @@
+package dotty.tools.dotc.config
+
+object Printers {
+
+ /** A printer that writes to standard out. `msg` is by-name so a
+  * disabled printer (see `noPrinter`) never evaluates it.
+  */
+ class Printer {
+ def println(msg: => String): Unit = System.out.println(msg)
+ }
+
+ /** A no-op printer: discards the message without evaluating it. */
+ object noPrinter extends Printer {
+ override def println(msg: => String): Unit = ()
+ }
+
+ // One printer per compiler subsystem. To enable debug output for a
+ // subsystem, change its value from `noPrinter` to `new Printer`.
+ val default: Printer = new Printer
+ val dottydoc: Printer = noPrinter
+ val core: Printer = noPrinter
+ val typr: Printer = noPrinter
+ val constr: Printer = noPrinter
+ val checks: Printer = noPrinter
+ val overload: Printer = noPrinter
+ val implicits: Printer = noPrinter
+ val implicitsDetailed: Printer = noPrinter
+ val subtyping: Printer = noPrinter
+ val unapp: Printer = noPrinter
+ val gadts: Printer = noPrinter
+ val hk: Printer = noPrinter
+ val variances: Printer = noPrinter
+ val incremental: Printer = noPrinter
+ val config: Printer = noPrinter
+ val transforms: Printer = noPrinter
+ val completions: Printer = noPrinter
+ val cyclicErrors: Printer = noPrinter
+ val pickling: Printer = noPrinter
+ val inlining: Printer = noPrinter
+}
diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala
new file mode 100644
index 000000000..ec1f24d06
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/Properties.scala
@@ -0,0 +1,165 @@
+package dotty.tools
+package dotc
+package config
+
+import java.io.{ IOException, PrintWriter }
+import java.util.jar.Attributes.{ Name => AttributeName }
+
+/** Loads `library.properties` from the jar. */
+object Properties extends PropertiesTrait {
+ protected def propCategory = "library"
+ protected def pickJarBasedOn = classOf[Option[_]]
+
+ /** Scala manifest attributes.
+ */
+ @sharable val ScalaCompilerVersion = new AttributeName("Scala-Compiler-Version")
+}
+
+trait PropertiesTrait {
+ protected def propCategory: String // specializes the remainder of the values
+ protected def pickJarBasedOn: Class[_] // props file comes from jar containing this
+
+ /** The name of the properties file */
+ protected val propFilename = "/" + propCategory + ".properties"
+
+ /** The loaded properties */
+ @sharable protected lazy val scalaProps: java.util.Properties = {
+ val props = new java.util.Properties
+ val stream = pickJarBasedOn getResourceAsStream propFilename
+ if (stream ne null)
+ quietlyDispose(props load stream, stream.close)
+
+ props
+ }
+
+ private def quietlyDispose(action: => Unit, disposal: => Unit) =
+ try { action }
+ finally {
+ try { disposal }
+ catch { case _: IOException => }
+ }
+
+ def propIsSet(name: String) = System.getProperty(name) != null
+ def propIsSetTo(name: String, value: String) = propOrNull(name) == value
+ def propOrElse(name: String, alt: String) = System.getProperty(name, alt)
+ def propOrEmpty(name: String) = propOrElse(name, "")
+ def propOrNull(name: String) = propOrElse(name, null)
+ def propOrNone(name: String) = Option(propOrNull(name))
+ def propOrFalse(name: String) = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase)
+ def setProp(name: String, value: String) = System.setProperty(name, value)
+ def clearProp(name: String) = System.clearProperty(name)
+
+ def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt
+ def envOrNone(name: String) = Option(System getenv name)
+
+ // for values based on propFilename
+ def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
+ def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
+ def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name))
+
+ /** The numeric portion of the runtime Scala version, if this is a final
+ * release. If for instance the versionString says "version 2.9.0.final",
+ * this would return Some("2.9.0").
+ *
+ * @return Some(version) if this is a final release build, None if
+ * it is an RC, Beta, etc. or was built from source, or if the version
+ * cannot be read.
+ */
+ val releaseVersion =
+ for {
+ v <- scalaPropOrNone("maven.version.number")
+ if !(v endsWith "-SNAPSHOT")
+ } yield v
+
+ /** The development Scala version, if this is not a final release.
+ * The precise contents are not guaranteed, but it aims to provide a
+ * unique repository identifier (currently the svn revision) in the
+ * fourth dotted segment if the running version was built from source.
+ *
+ * @return Some(version) if this is a non-final version, None if this
+ * is a final release or the version cannot be read.
+ */
+ val developmentVersion =
+ for {
+ v <- scalaPropOrNone("maven.version.number")
+ if v endsWith "-SNAPSHOT"
+ ov <- scalaPropOrNone("version.number")
+ } yield ov
+
+ /** Either the development or release version if known, otherwise
+ * the empty string.
+ */
+ def versionNumberString = scalaPropOrEmpty("version.number")
+
+ /** The version number of the jar this was loaded from plus "version " prefix,
+ * or "version (unknown)" if it cannot be determined.
+ */
+  val versionString = "version " + "0.01" // scalaPropOrElse("version.number", "(unknown)")
+ val copyrightString = "(c) 2013 LAMP/EPFL" // scalaPropOrElse("copyright.string", "(c) 2002-2011 LAMP/EPFL")
+
+ /** This is the encoding to use reading in source files, overridden with -encoding
+ * Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
+ */
+ def sourceEncoding = scalaPropOrElse("file.encoding", "UTF-8")
+ def sourceReader = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader")
+
+ /** This is the default text encoding, overridden (unreliably) with
+ * `JAVA_OPTS="-Dfile.encoding=Foo"`
+ */
+ def encodingString = propOrElse("file.encoding", "UTF-8")
+
+ /** The default end of line character.
+ */
+ def lineSeparator = propOrElse("line.separator", "\n")
+
+ /** Various well-known properties.
+ */
+ def javaClassPath = propOrEmpty("java.class.path")
+ def javaHome = propOrEmpty("java.home")
+ def javaVendor = propOrEmpty("java.vendor")
+ def javaVersion = propOrEmpty("java.version")
+ def javaVmInfo = propOrEmpty("java.vm.info")
+ def javaVmName = propOrEmpty("java.vm.name")
+ def javaVmVendor = propOrEmpty("java.vm.vendor")
+ def javaVmVersion = propOrEmpty("java.vm.version")
+ def osName = propOrEmpty("os.name")
+ def scalaHome = propOrEmpty("scala.home")
+ def tmpDir = propOrEmpty("java.io.tmpdir")
+ def userDir = propOrEmpty("user.dir")
+ def userHome = propOrEmpty("user.home")
+ def userName = propOrEmpty("user.name")
+
+ /** Some derived values.
+ */
+ def isWin = osName startsWith "Windows"
+ def isMac = javaVendor startsWith "Apple"
+
+ // This is looking for javac, tools.jar, etc.
+ // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
+ // and finally the system property based javaHome.
+ def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))
+
+ def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
+ def scalaCmd = if (isWin) "scala.bat" else "scala"
+ def scalacCmd = if (isWin) "scalac.bat" else "scalac"
+
+ /** Can the java version be determined to be at least as high as the argument?
+ * Hard to properly future proof this but at the rate 1.7 is going we can leave
+ * the issue for our cyborg grandchildren to solve.
+ */
+ def isJavaAtLeast(version: String) = {
+ val okVersions = version match {
+ case "1.5" => List("1.5", "1.6", "1.7")
+ case "1.6" => List("1.6", "1.7")
+ case "1.7" => List("1.7")
+ case _ => Nil
+ }
+ okVersions exists (javaVersion startsWith _)
+ }
+
+ // provide a main method so version info can be obtained by running this
+ def main(args: Array[String]): Unit = {
+ val writer = new PrintWriter(Console.err, true)
+ writer println versionMsg
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
new file mode 100644
index 000000000..fd2ded0b5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -0,0 +1,267 @@
+package dotty.tools.dotc
+package config
+
+import PathResolver.Defaults
+import rewrite.Rewrites
+
+class ScalaSettings extends Settings.SettingGroup {
+
+ protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".")
+
+ /** Path related settings.
+ */
+ val bootclasspath = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath)
+ val extdirs = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs)
+ val javabootclasspath = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath)
+ val javaextdirs = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs)
+ val sourcepath = PathSetting("-sourcepath", "Specify location(s) of source files.", "") // Defaults.scalaSourcePath
+ val argfiles = BooleanSetting("@<file>", "A text file containing compiler arguments (options and source files)")
+ val classpath = PathSetting("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
+ val d = StringSetting("-d", "directory|jar", "destination for generated classfiles.", ".")
+  val priorityclasspath = PathSetting("-priorityclasspath", "class path that takes precedence over all other paths (for testing only)", "")
+
+ /** Other settings.
+ */
+ val dependencyfile = StringSetting("-dependencyfile", "file", "Set dependency tracking file.", ".scala_dependencies")
+ val deprecation = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.")
+ val migration = BooleanSetting("-migration", "Emit warning and location for migration issues from Scala 2.")
+ val encoding = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding)
+ val explaintypes = BooleanSetting("-explaintypes", "Explain type errors in more detail.")
+ val explain = BooleanSetting("-explain", "Explain errors in more detail.")
+ val feature = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
+ val g = ChoiceSetting("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
+ val help = BooleanSetting("-help", "Print a synopsis of standard options")
+ val nowarn = BooleanSetting("-nowarn", "Generate no warnings.")
+ val color = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/)
+ val target = ChoiceSetting("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
+ List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "jvm-1.8", "msil"),
+ "jvm-1.8")
+ val scalajs = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).")
+ val unchecked = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
+ val uniqid = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.")
+ val usejavacp = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+ val verbose = BooleanSetting("-verbose", "Output messages about what the compiler is doing.")
+ val version = BooleanSetting("-version", "Print product version and exit.")
+ val pageWidth = IntSetting("-pagewidth", "Set page width", 80)
+
+ val jvmargs = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
+ val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
+ val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "")
+ val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
+ val strict = BooleanSetting("-strict", "Use strict type rules, which means some formerly legal code does not typecheck anymore.")
+
+ val nospecialization = BooleanSetting("-no-specialization", "Ignore @specialize annotations.")
+ val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
+ val rewrite = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with -language:Scala2 rewrites sources to migrate to new syntax")
+
+ /** -X "Advanced" settings
+ */
+ val Xhelp = BooleanSetting("-X", "Print a synopsis of advanced options.")
+ val assemname = StringSetting("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil")
+ val assemrefs = StringSetting("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil")
+ val assemextdirs = StringSetting("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil")
+ val sourcedir = StringSetting("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil")
+ val checkInit = BooleanSetting("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
+ val noassertions = BooleanSetting("-Xdisable-assertions", "Generate no assertions or assumptions.")
+// val elidebelow = IntSetting("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
+// elidable.MINIMUM, None, elidable.byName get _)
+ val noForwarders = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
+ val genPhaseGraph = StringSetting("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
+ val XlogImplicits = BooleanSetting("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
+ val XminImplicitSearchDepth = IntSetting("-Xmin-implicit-search-depth", "Set number of levels of implicit searches undertaken before checking for divergence.", 5)
+ val xmaxInlines = IntSetting("-Xmax-inlines", "Maximal number of successive inlines", 70)
+ val logImplicitConv = BooleanSetting("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
+ val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
+ val logFreeTerms = BooleanSetting("-Xlog-free-terms", "Print a message when reification creates a free term.")
+ val logFreeTypes = BooleanSetting("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
+ val maxClassfileName = IntSetting("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, 72 to 255)
+ val Xmigration = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.")
+ val Xsource = VersionSetting("-Xsource", "Treat compiler input as Scala source for the specified version.")
+ val Xverify = BooleanSetting("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
+ val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
+ val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
+ val showPlugins = BooleanSetting("-Xplugin-list", "Print a synopsis of loaded plugins.")
+ val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
+ val pluginsDir = StringSetting("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
+ val Xprint = PhasesSetting("-Xprint", "Print out program after")
+ val writeICode = PhasesSetting("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
+ val Xprintpos = BooleanSetting("-Xprint-pos", "Print tree positions, as offsets.")
+ val printtypes = BooleanSetting("-Xprint-types", "Print tree types (debugging option).")
+ val XprintDiff = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.")
+  val XprintDiffDel = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.")
+ val prompt = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).")
+ val script = StringSetting("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
+ val mainClass = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
+ val Xshowcls = StringSetting("-Xshow-class", "class", "Show internal representation of class.", "")
+ val Xshowobj = StringSetting("-Xshow-object", "object", "Show internal representation of object.", "")
+ val showPhases = BooleanSetting("-Xshow-phases", "Print a synopsis of compiler phases.")
+ val sourceReader = StringSetting("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
+ val XnoValueClasses = BooleanSetting("-Xno-value-classes", "Do not use value classes. Helps debugging.")
+  val XreplLineWidth = IntSetting("-Xrepl-line-width", "Maximal number of columns per line for REPL output", 390)
+ val XoldPatmat = BooleanSetting("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
+ val XnoPatmatAnalysis = BooleanSetting("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
+ val XfullLubs = BooleanSetting("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
+
+ /** -Y "Private" settings
+ */
+ val overrideObjects = BooleanSetting("-Yoverride-objects", "Allow member objects to be overridden.")
+ val overrideVars = BooleanSetting("-Yoverride-vars", "Allow vars to be overridden.")
+ val Yhelp = BooleanSetting("-Y", "Print a synopsis of private options.")
+ val browse = PhasesSetting("-Ybrowse", "Browse the abstract syntax tree after")
+ val Ycheck = PhasesSetting("-Ycheck", "Check the tree at the end of")
+ val YcheckMods = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync")
+  val YcheckTypedTrees = BooleanSetting("-YcheckTypedTrees", "Check all constructed typed trees for type correctness")
+ val Yshow = PhasesSetting("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
+ val Ycloselim = BooleanSetting("-Yclosure-elim", "Perform closure elimination.")
+ val Ycompacttrees = BooleanSetting("-Ycompact-trees", "Use compact tree printer when displaying trees.")
+ val noCompletion = BooleanSetting("-Yno-completion", "Disable tab-completion in the REPL.")
+ val Ydce = BooleanSetting("-Ydead-code", "Perform dead code elimination.")
+ val debug = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.")
+ val debugNames = BooleanSetting("-YdebugNames", "Show name-space indicators when printing names")
+ val debugTrace = BooleanSetting("-Ydebug-trace", "Trace core operations")
+ val debugFlags = BooleanSetting("-Ydebug-flags", "Print all flags of definitions")
+ val debugOwners = BooleanSetting("-Ydebug-owners", "Print all owners of definitions (requires -Yprint-syms)")
+ //val doc = BooleanSetting ("-Ydoc", "Generate documentation")
+ val termConflict = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
+ val inlineHandlers = BooleanSetting("-Yinline-handlers", "Perform exception handler inlining when possible.")
+  val YinlinerWarnings = BooleanSetting("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)")
+ val Ylinearizer = ChoiceSetting("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
+ val log = PhasesSetting("-Ylog", "Log operations during")
+ val Ylogcp = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.")
+ val Ynogenericsig = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.")
+ val YnoImports = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.")
+ val YnoPredef = BooleanSetting("-Yno-predef", "Compile without importing Predef.")
+ val noAdaptedArgs = BooleanSetting("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.")
+ val selfInAnnots = BooleanSetting("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
+ val Yshowtrees = BooleanSetting("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.")
+ val YshowtreesCompact = BooleanSetting("-Yshow-trees-compact", "(Requires -Xprint:) Print detailed ASTs in compact form.")
+ val YshowtreesStringified = BooleanSetting("-Yshow-trees-stringified", "(Requires -Xprint:) Print stringifications along with detailed ASTs.")
+ val Yshowsyms = BooleanSetting("-Yshow-syms", "Print the AST symbol hierarchy after each phase.")
+ val Yshowsymkinds = BooleanSetting("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
+ val Yskip = PhasesSetting("-Yskip", "Skip")
+ val Ygenjavap = StringSetting("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
+ val Ydumpclasses = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
+ val Ynosqueeze = BooleanSetting("-Yno-squeeze", "Disable creation of compact code in matching.")
+ val YstopAfter = PhasesSetting("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
+ val YstopBefore = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully
+ val refinementMethodDispatch = ChoiceSetting("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
+ val Yrangepos = BooleanSetting("-Yrangepos", "Use range positions for syntax trees.")
+ val Ybuilderdebug = ChoiceSetting("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
+ val Yreifycopypaste = BooleanSetting("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Yreplsync = BooleanSetting("-Yrepl-sync", "Do not use asynchronous code for repl startup")
+  val YmethodInfer = BooleanSetting("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
+ val etaExpandKeepsStar = BooleanSetting("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
+ val Yinvalidate = StringSetting("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
+ val noSelfCheck = BooleanSetting("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
+ val YtraceContextCreation = BooleanSetting("-Ytrace-context-creation", "Store stack trace of context creations.")
+  val YshowSuppressedErrors = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.")
+ val Yheartbeat = BooleanSetting("-Yheartbeat", "show heartbeat stack trace of compiler operations.")
+ val Yprintpos = BooleanSetting("-Yprintpos", "show tree positions.")
+ val YnoDeepSubtypes = BooleanSetting("-Yno-deep-subtypes", "throw an exception on deep subtyping call stacks.")
+ val YplainPrinter = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.")
+ val YprintSyms = BooleanSetting("-Yprint-syms", "when printing trees print info in symbols instead of corresponding info in trees.")
+ val YtestPickler = BooleanSetting("-Ytest-pickler", "self-test for pickling functionality; should be used with -Ystop-after:pickler")
+ val YcheckReentrant = BooleanSetting("-Ycheck-reentrant", "check that compiled program does not contain vars that can be accessed from a global root.")
+ val YkeepComments = BooleanSetting("-Ykeep-comments", "Keep comments when scanning source files.")
+ val YforceSbtPhases = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.")
+ val YdumpSbtInc = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.")
+ val YcheckAllPatmat = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm)")
+ def stop = YstopAfter
+
+ /** Area-specific debug output.
+ */
+ val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
+ val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
+ val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
+ val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
+ val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
+ val Yissuedebug = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.")
+ val YmacrodebugLite = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.")
+ val YmacrodebugVerbose = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
+ val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
+ val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
+ val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
+ val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.")
+ val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
+ val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+ val Yexplainlowlevel = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.")
+ val YnoDoubleBindings = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).")
+ val YshowVarBounds = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds")
+ val YnoInline = BooleanSetting("-Yno-inline", "Suppress inlining.")
+
+ val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize"
+
+ /** IDE-specific settings
+ */
+ val YpresentationVerbose = BooleanSetting("-Ypresentation-verbose", "Print information about presentation compiler tasks.")
+ val YpresentationDebug = BooleanSetting("-Ypresentation-debug", "Enable debugging output for the presentation compiler.")
+ val YpresentationStrict = BooleanSetting("-Ypresentation-strict", "Do not report type errors in sources with syntax errors.")
+
+ val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
+ val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
+ val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999)
+
+ /** Doc specific settings */
+ val template = OptionSetting[String](
+ "-template",
+ "A mustache template for rendering each top-level entity in the API"
+ )
+
+ val resources = OptionSetting[String](
+ "-resources",
+ "A directory containing static resources needed for the API documentation"
+ )
+
+ val DocTitle = StringSetting (
+ "-Ydoc-title",
+ "title",
+ "The overall name of the Scaladoc site",
+ ""
+ )
+
+ val DocVersion = StringSetting (
+ "-Ydoc-version",
+ "version",
+ "An optional version number, to be appended to the title",
+ ""
+ )
+
+ val DocOutput = StringSetting (
+ "-Ydoc-output",
+ "outdir",
+ "The output directory in which to place the documentation",
+ "."
+ )
+
+ val DocFooter = StringSetting (
+ "-Ydoc-footer",
+ "footer",
+ "A footer on every Scaladoc page, by default the EPFL/Lightbend copyright notice. Can be overridden with a custom footer.",
+ ""
+ )
+
+ val DocUncompilable = StringSetting (
+ "-Ydoc-no-compile",
+ "path",
+ "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
+ ""
+ )
+
+ //def DocUncompilableFiles(implicit ctx: Context) = DocUncompilable.value match {
+ // case "" => Nil
+ // case path => io.Directory(path).deepFiles.filter(_ hasExtension "scala").toList
+ //}
+
+ val DocExternalDoc = MultiStringSetting (
+ "-Ydoc-external-doc",
+ "external-doc",
+ "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
+ )
+
+ val DocAuthor = BooleanSetting("-Ydoc-author", "Include authors.", true)
+
+ val DocGroups = BooleanSetting (
+ "-Ydoc:groups",
+ "Group similar functions together (based on the @group annotation)"
+ )
+}
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
new file mode 100644
index 000000000..02ba74af9
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
@@ -0,0 +1,184 @@
+/* @author James Iry
+ */
+package dotty.tools
+package dotc.config
+
+import scala.util.{Try, Success, Failure}
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+sealed abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A scala version that sorts higher than all actual versions
+ */
+@sharable case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to segregate builds
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+@sharable case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Methods for parsing ScalaVersions
+ */
+@sharable object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def parse(versionString : String): Try[ScalaVersion] = {
+ def failure = Failure(new NumberFormatException(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ ))
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = Try(toInt(s)).isSuccess
+
+ import ScalaBuild._
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "" | "any" => Success(AnyScalaVersion)
+ case "none" => Success(NoScalaVersion)
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS)))
+ case _ => failure
+ } catch {
+ case e: NumberFormatException => failure
+ }
+ }
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = parse(util.Properties.versionNumberString).get
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+
+object ScalaBuild {
+
+ /** A development, test, nightly, snapshot or other "unofficial" build
+ */
+ case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+ }
+
+ /** A final build
+ */
+ case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+ }
+
+ /** A candidate for final release
+ */
+ case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+ }
+
+ /** An intermediate release
+ */
+ case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala
new file mode 100644
index 000000000..cffa047fe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/Settings.scala
@@ -0,0 +1,270 @@
+package dotty.tools.dotc
+package config
+
+import collection.mutable.{ ArrayBuffer }
+import scala.util.{ Try, Success, Failure }
+import scala.reflect.internal.util.StringOps
+import reflect.ClassTag
+import core.Contexts._
+// import annotation.unchecked
+ // Dotty deviation: Imports take precedence over definitions in enclosing package
+ // (Note that @unchecked is in scala, not annotation, so annotation.unchecked gives
+ // us a package, which is not what was intended anyway).
+import language.existentials
+
+object Settings {
+
+ val BooleanTag = ClassTag.Boolean
+ val IntTag = ClassTag.Int
+ val StringTag = ClassTag(classOf[String])
+ val ListTag = ClassTag(classOf[List[_]])
+ val VersionTag = ClassTag(classOf[ScalaVersion])
+ val OptionTag = ClassTag(classOf[Option[_]])
+
+ class SettingsState(initialValues: Seq[Any]) {
+ private var values = ArrayBuffer(initialValues: _*)
+ private var _wasRead: Boolean = false
+
+ override def toString = s"SettingsState(values: ${values.toList})"
+
+ def value(idx: Int): Any = {
+ _wasRead = true
+ values(idx)
+ }
+
+ def update(idx: Int, x: Any): SettingsState =
+ if (_wasRead)
+ new SettingsState(values).update(idx, x)
+ else {
+ values(idx) = x
+ this
+ }
+ }
+
+ case class ArgsSummary(
+ sstate: SettingsState,
+ arguments: List[String],
+ errors: List[String]) {
+
+ def fail(msg: String) =
+ ArgsSummary(sstate, arguments, errors :+ msg)
+ }
+
  /** Descriptor of a single command-line setting.
   *
   *  A `Setting` is immutable; its current value lives in a `SettingsState`
   *  at slot `idx` and is accessed through `valueIn`/`updateIn`.
   *
   *  @param name          primary option name, e.g. "-classpath"
   *  @param description   help text for usage output
   *  @param default       value used when the option is not supplied
   *  @param helpArg       placeholder for the option's argument in help text
   *  @param choices       admissible values (a `Range` or a `List`); empty means unconstrained
   *  @param prefix        when non-empty, the option is matched by prefix and
   *                       the remainder of the argument is the value
   *  @param aliases       alternative names accepted for this option
   *  @param depends       settings that must hold the paired values for this one to be valid
   *  @param propertyClass for `Option`-valued settings, the class instantiated when set
   *  @param idx           this setting's slot in the `SettingsState` array
   */
  case class Setting[T: ClassTag] private[Settings] (
    name: String,
    description: String,
    default: T,
    helpArg: String = "",
    choices: Seq[T] = Nil,
    prefix: String = "",
    aliases: List[String] = Nil,
    depends: List[(Setting[_], Any)] = Nil,
    propertyClass: Option[Class[_]] = None)(private[Settings] val idx: Int) {

    /** A copy of this setting that also answers to the name `abbrv`. */
    def withAbbreviation(abbrv: String): Setting[T] =
      copy(aliases = aliases :+ abbrv)(idx)

    /** A copy of this setting that additionally requires `setting` to be `value`. */
    def dependsOn[U](setting: Setting[U], value: U): Setting[T] =
      copy(depends = depends :+ (setting, value))(idx)

    /** This setting's current value in `state`. */
    def valueIn(state: SettingsState): T =
      state.value(idx).asInstanceOf[T]

    /** `state` updated so that this setting has value `x`. */
    def updateIn(state: SettingsState, x: Any): SettingsState = x match {
      case _: T => state.update(idx, x)
      case _ =>
        // would like to do:
        // throw new ClassCastException(s"illegal argument, found: $x of type ${x.getClass}, required: ${implicitly[ClassTag[T]]}")
        // but this runs afoul of primitive types. Concretely: if T is Boolean, then x is a boxed Boolean and the test will fail.
        // Maybe this is a bug in Scala 2.10?
        state.update(idx, x.asInstanceOf[T])
    }

    /** Does this setting still have its default value in `state`? */
    def isDefaultIn(state: SettingsState) = valueIn(state) == default

    /** Human-readable rendering of `choices` for error messages; "" if unconstrained. */
    def legalChoices: String =
      if (choices.isEmpty) ""
      else choices match {
        case r: Range => r.head + ".." + r.last
        case xs: List[_] => xs.mkString(", ")
      }

    /** Is `arg` an admissible value for this setting? */
    def isLegal(arg: Any): Boolean =
      if (choices.isEmpty)
        arg match {
          case _: T => true
          case _ => false
        }
      else choices match {
        case r: Range =>
          arg match {
            case x: Int => r.head <= x && x <= r.last
            case _ => false
          }
        case xs: List[_] =>
          xs contains arg
      }

    /** Attempt to consume the leading argument(s) of `state` for this setting.
     *
     *  Returns `state` unchanged when the first argument does not address this
     *  setting; otherwise returns a summary with the value updated and the
     *  consumed arguments dropped, or with an error appended on bad input.
     *
     *  NOTE: the irrefutable pattern below assumes `state.arguments` is
     *  non-empty; callers (see `SettingGroup.processArguments`) ensure this.
     */
    def tryToSet(state: ArgsSummary): ArgsSummary = {
      val ArgsSummary(sstate, arg :: args, errors) = state
      def update(value: Any, args: List[String]) =
        ArgsSummary(updateIn(sstate, value), args, errors)
      def fail(msg: String, args: List[String]) =
        ArgsSummary(sstate, args, errors :+ msg)
      def missingArg =
        fail(s"missing argument for option $name", args)
      // Dispatch on the setting's value type; `argRest` is the text after the
      // prefix or after a ':' in the matched argument.
      def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match {
        case (BooleanTag, _) =>
          update(true, args)
        case (OptionTag, _) =>
          update(Some(propertyClass.get.newInstance), args)
        case (ListTag, _) =>
          if (argRest.isEmpty) missingArg
          else update((argRest split ",").toList, args)
        case (StringTag, _) if choices.nonEmpty =>
          if (argRest.isEmpty) missingArg
          else if (!choices.contains(argRest))
            fail(s"$arg is not a valid choice for $name", args)
          else update(argRest, args)
        case (StringTag, arg2 :: args2) =>
          // Unconstrained string settings take their value from the next argument.
          update(arg2, args2)
        case (IntTag, arg2 :: args2) =>
          try {
            val x = arg2.toInt
            choices match {
              case r: Range if x < r.head || r.last < x =>
                fail(s"$arg2 is out of legal range $legalChoices for $name", args2)
              case _ =>
                update(x, args2)
            }
          } catch {
            case _: NumberFormatException =>
              fail(s"$arg2 is not an integer argument for $name", args2)
          }
        case (VersionTag, _) =>
          ScalaVersion.parse(argRest) match {
            case Success(v) => update(v, args)
            case Failure(ex) => fail(ex.getMessage, args)
          }
        case (_, Nil) =>
          missingArg
      }

      if (prefix != "" && arg.startsWith(prefix))
        doSet(arg drop prefix.length)
      else if (prefix == "" && name == arg.takeWhile(_ != ':'))
        doSet(arg.dropWhile(_ != ':').drop(1))
      else
        state
    }
  }
+
+ object Setting {
+ implicit class SettingDecorator[T](val setting: Setting[T]) extends AnyVal {
+ def value(implicit ctx: Context): T = setting.valueIn(ctx.sstate)
+ def update(x: T)(implicit ctx: Context): SettingsState = setting.updateIn(ctx.sstate, x)
+ def isDefault(implicit ctx: Context): Boolean = setting.isDefaultIn(ctx.sstate)
+ }
+ }
+
  /** A collection of settings, with facilities to declare them and to parse a
   *  command line against them. Each declared setting is assigned the next
   *  free slot index of the group's `SettingsState`.
   */
  class SettingGroup {

    val _allSettings = new ArrayBuffer[Setting[_]]
    /** All settings declared in this group, in declaration order. */
    def allSettings: Seq[Setting[_]] = _allSettings

    /** A fresh state holding every setting's default value. */
    def defaultState = new SettingsState(allSettings map (_.default))

    /** The settings whose value in `state` differs from their default. */
    def userSetSettings(state: SettingsState) =
      allSettings filterNot (_.isDefaultIn(state))

    def toConciseString(state: SettingsState) =
      userSetSettings(state).mkString("(", " ", ")")

    /** Append an error for every user-set setting whose `depends` requirements
     *  are not satisfied in the current state.
     */
    private def checkDependencies(state: ArgsSummary): ArgsSummary =
      (state /: userSetSettings(state.sstate))(checkDependenciesOfSetting)

    private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[_]) =
      (state /: setting.depends) { (s, dep) =>
        val (depSetting, reqValue) = dep
        if (depSetting.valueIn(state.sstate) == reqValue) s
        else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})")
      }

    /** Iterates over the arguments applying them to settings where applicable.
     *  Then verifies setting dependencies are met.
     *
     *  This temporarily takes a boolean indicating whether to keep
     *  processing if an argument is seen which is not a command line option.
     *  This is an expedience for the moment so that you can say
     *
     *    scalac -d /tmp foo.scala -optimise
     *
     *  while also allowing
     *
     *    scala Program opt opt
     *
     *  to get their arguments.
     */
    protected def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = {
      def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors)
      state.arguments match {
        case Nil =>
          checkDependencies(stateWithArgs(skipped))
        case "--" :: args =>
          // An explicit "--" terminates option processing; the rest are plain arguments.
          checkDependencies(stateWithArgs(skipped ++ args))
        case x :: _ if x startsWith "-" =>
          // Offer the argument to each setting in turn; the first one that
          // consumes it (i.e. returns a different state) wins.
          def loop(settings: List[Setting[_]]): ArgsSummary = settings match {
            case setting :: settings1 =>
              val state1 = setting.tryToSet(state)
              if (state1 ne state) processArguments(state1, processAll, skipped)
              else loop(settings1)
            case Nil =>
              state.fail(s"bad option: '$x'")
          }
          loop(allSettings.toList)
        case arg :: args =>
          if (processAll) processArguments(stateWithArgs(args), processAll, skipped :+ arg)
          else state
      }
    }

    /** Process `arguments` against the state carried by the implicit context. */
    def processArguments(arguments: List[String], processAll: Boolean)(implicit ctx: Context): ArgsSummary =
      processArguments(ArgsSummary(ctx.sstate, arguments, Nil), processAll, Nil)

    /** Register the setting produced by `settingf`, handing it the next free slot index. */
    def publish[T](settingf: Int => Setting[T]): Setting[T] = {
      val setting = settingf(_allSettings.length)
      _allSettings += setting
      setting
    }

    // Factory methods: one per setting shape, all funneled through `publish`
    // so each setting receives a unique state index.

    def BooleanSetting(name: String, descr: String, initialValue: Boolean = false): Setting[Boolean] =
      publish(Setting(name, descr, initialValue))

    def StringSetting(name: String, helpArg: String, descr: String, default: String): Setting[String] =
      publish(Setting(name, descr, default, helpArg))

    def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String): Setting[String] =
      publish(Setting(name, descr, default, helpArg, choices))

    def IntSetting(name: String, descr: String, default: Int, range: Seq[Int] = Nil): Setting[Int] =
      publish(Setting(name, descr, default, choices = range))

    def MultiStringSetting(name: String, helpArg: String, descr: String): Setting[List[String]] =
      publish(Setting(name, descr, Nil, helpArg))

    def PathSetting(name: String, descr: String, default: String): Setting[String] =
      publish(Setting(name, descr, default))

    def PhasesSetting(name: String, descr: String, default: String = ""): Setting[List[String]] =
      publish(Setting(name, descr, if (default.isEmpty) Nil else List(default)))

    def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] =
      publish(Setting(name, descr, Nil, prefix = pre))

    def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] =
      publish(Setting(name, descr, default))

    def OptionSetting[T: ClassTag](name: String, descr: String): Setting[Option[T]] =
      publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass)))
  }
+}
diff --git a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
new file mode 100644
index 000000000..07972b99b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
@@ -0,0 +1,34 @@
+package dotty.tools
+package dotc
+package config
+
+import java.security.AccessControlException
+
+/** For placing a wrapper function around property functions.
+ * Motivated by places like google app engine throwing exceptions
+ * on property lookups.
+ */
/** Guards property lookups with a caller-supplied wrapper.
 *
 *  Motivated by environments (e.g. Google App Engine) where property
 *  lookups may throw; `wrap` decides how such failures surface.
 */
trait WrappedProperties extends PropertiesTrait {
  /** Evaluate `body`, yielding `None` when the lookup should be suppressed. */
  def wrap[T](body: => T): Option[T]

  protected def propCategory = "wrapped"
  protected def pickJarBasedOn = this.getClass

  override def propIsSet(name: String) = wrap(super.propIsSet(name)).contains(true)
  override def propOrElse(name: String, alt: String) = wrap(super.propOrElse(name, alt)).getOrElse(alt)
  override def setProp(name: String, value: String) = wrap(super.setProp(name, value)).orNull
  override def clearProp(name: String) = wrap(super.clearProp(name)).orNull
  override def envOrElse(name: String, alt: String) = wrap(super.envOrElse(name, alt)).getOrElse(alt)
  override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten

  /** All JVM system properties, or an empty iterator when access is denied. */
  def systemProperties: Iterator[(String, String)] = {
    import scala.collection.JavaConverters._
    val props = wrap(System.getProperties.asScala.iterator)
    props.getOrElse(Iterator.empty)
  }
}
+
object WrappedProperties {
  /** Property access that treats a denied lookup (`AccessControlException`)
   *  as an absent value rather than an error.
   */
  object AccessControl extends WrappedProperties {
    def wrap[T](body: => T) =
      try Some(body)
      catch { case _: AccessControlException => None }
  }
}
diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala
new file mode 100644
index 000000000..0e8e5a1f0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala
@@ -0,0 +1,162 @@
+package dotty.tools.dotc
+package core
+
+import Symbols._, Types._, util.Positions._, Contexts._, Constants._, ast.tpd._
+import config.ScalaVersion
+import StdNames._
+import dotty.tools.dotc.ast.{tpd, untpd}
+
/** Representation of annotations attached to symbols and types. */
object Annotations {

  /** An annotation, backed by a (possibly lazily computed) typed tree. */
  abstract class Annotation {
    /** The typed tree of the annotation's constructor application. */
    def tree(implicit ctx: Context): Tree

    /** The annotation class's symbol, derived from the tree. */
    def symbol(implicit ctx: Context): Symbol =
      if (tree.symbol.isConstructor) tree.symbol.owner
      else tree.tpe.typeSymbol

    /** Is this an annotation of class `cls` or a subclass thereof? */
    def matches(cls: Symbol)(implicit ctx: Context): Boolean = symbol.derivesFrom(cls)

    def appliesToModule: Boolean = true // for now; see remark in SymDenotations

    /** This annotation carrying `tree`; `this` when the tree is unchanged. */
    def derivedAnnotation(tree: Tree)(implicit ctx: Context) =
      if (tree eq this.tree) this else Annotation(tree)

    /** The argument trees of the annotation's constructor application. */
    def arguments(implicit ctx: Context) = ast.tpd.arguments(tree)

    /** The `i`-th constructor argument, if there is one. */
    def argument(i: Int)(implicit ctx: Context): Option[Tree] = {
      val args = arguments
      if (i < args.length) Some(args(i)) else None
    }

    /** The `i`-th constructor argument as a constant, if its type is a ConstantType. */
    def argumentConstant(i: Int)(implicit ctx: Context): Option[Constant] =
      for (ConstantType(c) <- argument(i) map (_.tpe)) yield c

    /** Force evaluation of a lazily computed annotation tree (result discarded). */
    def ensureCompleted(implicit ctx: Context): Unit = tree
  }

  /** An annotation whose tree is available eagerly. */
  case class ConcreteAnnotation(t: Tree) extends Annotation {
    def tree(implicit ctx: Context): Tree = t
  }

  /** An annotation whose tree is computed on first access and then cached.
   *  The annotation class symbol is known up front, so `symbol` does not
   *  force the tree.
   */
  abstract case class LazyAnnotation(sym: Symbol) extends Annotation {
    private var myTree: Tree = null
    def tree(implicit ctx: Context) = {
      if (myTree == null) myTree = complete(ctx)
      myTree
    }
    def complete(implicit ctx: Context): Tree
    override def symbol(implicit ctx: Context): Symbol = sym
  }

  /** An annotation indicating the body of a right-hand side,
   *  typically of an inline method. Treated specially in
   *  pickling/unpickling and TypeTreeMaps
   */
  abstract class BodyAnnotation extends Annotation {
    override def symbol(implicit ctx: Context) = defn.BodyAnnot
    override def derivedAnnotation(tree: Tree)(implicit ctx: Context) =
      if (tree eq this.tree) this else ConcreteBodyAnnotation(tree)
    // A body annotation has no constructor arguments and never needs completing.
    override def arguments(implicit ctx: Context) = Nil
    override def ensureCompleted(implicit ctx: Context) = ()
  }

  /** A body annotation with an eagerly available body tree. */
  case class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation {
    def tree(implicit ctx: Context) = body
  }

  /** A body annotation whose tree is computed from `bodyExpr` exactly once,
   *  on first access; subsequent accesses return the cached tree.
   */
  case class LazyBodyAnnotation(bodyExpr: Context => Tree) extends BodyAnnotation {
    private var evaluated = false
    private var myBody: Tree = _
    def tree(implicit ctx: Context) = {
      if (evaluated) assert(myBody != null)
      else {
        evaluated = true
        myBody = bodyExpr(ctx)
      }
      myBody
    }
    def isEvaluated = evaluated
  }

  /** Factory methods for annotations. */
  object Annotation {

    /** An annotation backed directly by `tree`. */
    def apply(tree: Tree) = ConcreteAnnotation(tree)

    /** An annotation `@cls` without arguments. */
    def apply(cls: ClassSymbol)(implicit ctx: Context): Annotation =
      apply(cls, Nil)

    def apply(cls: ClassSymbol, arg: Tree)(implicit ctx: Context): Annotation =
      apply(cls, arg :: Nil)

    def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(implicit ctx: Context): Annotation =
      apply(cls, arg1 :: arg2 :: Nil)

    def apply(cls: ClassSymbol, args: List[Tree])(implicit ctx: Context): Annotation =
      apply(cls.typeRef, args)

    def apply(atp: Type, arg: Tree)(implicit ctx: Context): Annotation =
      apply(atp, arg :: Nil)

    def apply(atp: Type, arg1: Tree, arg2: Tree)(implicit ctx: Context): Annotation =
      apply(atp, arg1 :: arg2 :: Nil)

    /** An annotation built from the tree `new atp(args)`. */
    def apply(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation =
      apply(New(atp, args))

    /** The constructor application for `atp(args)` with overload resolution applied. */
    private def resolveConstructor(atp: Type, args:List[Tree])(implicit ctx: Context): Tree = {
      val targs = atp.argTypes
      tpd.applyOverloaded(New(atp withoutArgs targs), nme.CONSTRUCTOR, args, targs, atp, isAnnotConstructor = true)
    }

    /** Like `apply(atp, args)` but with constructor overload resolution. */
    def applyResolve(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation = {
      apply(resolveConstructor(atp, args))
    }

    /** An annotation for `sym` whose tree is computed lazily by `treeFn`. */
    def deferred(sym: Symbol, treeFn: Context => Tree)(implicit ctx: Context): Annotation =
      new LazyAnnotation(sym) {
        def complete(implicit ctx: Context) = treeFn(ctx)
      }

    def deferred(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation =
      deferred(atp.classSymbol, implicit ctx => New(atp, args))

    def deferredResolve(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation =
      deferred(atp.classSymbol, implicit ctx => resolveConstructor(atp, args))

    /** An `@Alias` annotation pointing at `sym` via a signed TermRef. */
    def makeAlias(sym: TermSymbol)(implicit ctx: Context) =
      apply(defn.AliasAnnot, List(
        ref(TermRef.withSigAndDenot(sym.owner.thisType, sym.name, sym.signature, sym))))

    /** A deferred `@Child[sym]` annotation. */
    def makeChild(sym: Symbol)(implicit ctx: Context) =
      deferred(defn.ChildAnnot,
        implicit ctx => New(defn.ChildAnnotType.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil))

    /** A `@SourceFile(path)` annotation. */
    def makeSourceFile(path: String)(implicit ctx: Context) =
      apply(defn.SourceFileAnnot, Literal(Constant(path)))
  }

  /** The annotation `@throws[cls]`, with the class also passed as the tree argument. */
  def ThrowsAnnotation(cls: ClassSymbol)(implicit ctx: Context) = {
    val tref = cls.typeRef
    Annotation(defn.ThrowsAnnotType.appliedTo(tref), Ident(tref))
  }

  /** A decorator that provides queries for specific annotations
   *  of a symbol.
   */
  implicit class AnnotInfo(val sym: Symbol) extends AnyVal {

    def isDeprecated(implicit ctx: Context) =
      sym.hasAnnotation(defn.DeprecatedAnnot)

    /** The message of the `@deprecated` annotation (first argument), if any. */
    def deprecationMessage(implicit ctx: Context) =
      for (annot <- sym.getAnnotation(defn.DeprecatedAnnot);
           arg <- annot.argumentConstant(0))
      yield arg.stringValue

    /** The parsed version from `@migration`'s second argument, if present. */
    def migrationVersion(implicit ctx: Context) =
      for (annot <- sym.getAnnotation(defn.MigrationAnnot);
           arg <- annot.argumentConstant(1))
      yield ScalaVersion.parse(arg.stringValue)

    /** The `@migration` message (first argument), if present.
     *  NOTE(review): this parses the message string as a ScalaVersion, same
     *  as `migrationVersion` — looks suspicious; confirm intended.
     */
    def migrationMessage(implicit ctx: Context) =
      for (annot <- sym.getAnnotation(defn.MigrationAnnot);
           arg <- annot.argumentConstant(0))
      yield ScalaVersion.parse(arg.stringValue)
  }
}
diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
new file mode 100644
index 000000000..78ec685fc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
@@ -0,0 +1,132 @@
+package dotty.tools
+package dotc
+package core
+
+import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
+import SymDenotations._, Denotations.SingleDenotation
+import util.Positions._
+import Decorators._
+import StdNames._
+import Annotations._
+import collection.mutable
+import ast.tpd._
+
+/** Realizability status */
/** Realizability status */
object CheckRealizable {

  /** The result of a realizability check: either the `Realizable` singleton
   *  or a value carrying a diagnostic message fragment `msg`.
   */
  abstract class Realizability(val msg: String) {
    /** `other` if this is `Realizable`, otherwise this (keeps the first failure). */
    def andAlso(other: => Realizability) =
      if (this == Realizable) other else this
    /** Transform a failure through `f`; `Realizable` passes through unchanged. */
    def mapError(f: Realizability => Realizability) =
      if (this == Realizable) this else f(this)
  }

  /** Success: the type is realizable. */
  object Realizable extends Realizability("")

  /** Failure: the type is not a concrete class type. */
  object NotConcrete extends Realizability(" is not a concrete type")

  /** Failure: the reference is not stable. */
  object NotStable extends Realizability(" is not a stable reference")

  /** Failure: a late-initialized symbol that could still be overridden. */
  class NotFinal(sym: Symbol)(implicit ctx: Context)
    extends Realizability(i" refers to nonfinal $sym")

  /** Failure: a type member whose lower bound may exceed its upper bound. */
  class HasProblemBounds(typ: SingleDenotation)(implicit ctx: Context)
    extends Realizability(i" has a member $typ with possibly conflicting bounds ${typ.info.bounds.lo} <: ... <: ${typ.info.bounds.hi}")

  /** Failure: a field whose own type is not realizable. */
  class HasProblemField(fld: SingleDenotation, problem: Realizability)(implicit ctx: Context)
    extends Realizability(i" has a member $fld which is not a legal path\n since ${fld.symbol.name}: ${fld.info}${problem.msg}")

  /** Failure wrapping a problem found in a term's underlying type. */
  class ProblemInUnderlying(tp: Type, problem: Realizability)(implicit ctx: Context)
    extends Realizability(i"s underlying type ${tp}${problem.msg}") {
    assert(problem != Realizable)
  }

  /** The realizability status of `tp`. */
  def realizability(tp: Type)(implicit ctx: Context) =
    new CheckRealizable().realizability(tp)

  /** Whether all of `tp`'s non-class type members have consistent bounds. */
  def boundsRealizability(tp: Type)(implicit ctx: Context) =
    new CheckRealizable().boundsRealizability(tp)
}
+
+/** Compute realizability status */
/** Compute realizability status */
class CheckRealizable(implicit ctx: Context) {
  import CheckRealizable._

  /** A set of all fields that have already been checked. Used
   *  to avoid infinite recursions when analyzing recursive types.
   */
  private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]()

  /** Is symbol's definition a lazy val?
   *  (note we exclude modules here, because their realizability is ensured separately)
   */
  private def isLateInitialized(sym: Symbol) = sym.is(Lazy, butNot = Module)

  /** The realizability status of given type `tp`. */
  def realizability(tp: Type): Realizability = tp.dealias match {
    case tp: TermRef =>
      val sym = tp.symbol
      // A symbol already marked Stable only needs a realizable prefix.
      if (sym.is(Stable)) realizability(tp.prefix)
      else {
        val r =
          if (!sym.isStable) NotStable
          else if (!isLateInitialized(sym)) realizability(tp.prefix)
          else if (!sym.isEffectivelyFinal) new NotFinal(sym)
          else realizability(tp.info).mapError(r => new ProblemInUnderlying(tp.info, r))
        // Side effect: cache a successful check by setting the Stable flag.
        if (r == Realizable) sym.setFlag(Stable)
        r
      }
    case _: SingletonType | NoPrefix =>
      Realizable
    case tp =>
      // Not a path: realizable only if concrete, with good bounds and fields.
      def isConcrete(tp: Type): Boolean = tp.dealias match {
        case tp: TypeRef => tp.symbol.isClass
        case tp: TypeProxy => isConcrete(tp.underlying)
        case tp: AndOrType => isConcrete(tp.tp1) && isConcrete(tp.tp2)
        case _ => false
      }
      if (!isConcrete(tp)) NotConcrete
      else boundsRealizability(tp).andAlso(memberRealizability(tp))
  }

  /** `Realizable` if `tp` has good bounds, a `HasProblemBounds` instance
   *  pointing to a bad bounds member otherwise.
   */
  private def boundsRealizability(tp: Type) = {
    def hasBadBounds(mbr: SingleDenotation) = {
      val bounds = mbr.info.bounds
      !(bounds.lo <:< bounds.hi)
    }
    tp.nonClassTypeMembers.find(hasBadBounds) match {
      case Some(mbr) => new HasProblemBounds(mbr)
      case _ => Realizable
    }
  }

  /** `Realizable` if all of `tp`'s non-struct fields have realizable types,
   *  a `HasProblemField` instance pointing to a bad field otherwise.
   */
  private def memberRealizability(tp: Type) = {
    def checkField(sofar: Realizability, fld: SingleDenotation): Realizability =
      sofar andAlso {
        if (checkedFields.contains(fld.symbol) || fld.symbol.is(Private | Mutable | Lazy))
          // if field is private it cannot be part of a visible path
          // if field is mutable it cannot be part of a path
          // if field is lazy it does not need to be initialized when the owning object is
          // so in all cases the field does not influence realizability of the enclosing object.
          Realizable
        else {
          checkedFields += fld.symbol
          realizability(fld.info).mapError(r => new HasProblemField(fld, r))
        }
      }
    if (ctx.settings.strict.value)
      // check fields only under strict mode for now.
      // Reason: An embedded field could well be nullable, which means it
      // should not be part of a path and need not be checked; but we cannot recognize
      // this situation until we have a typesystem that tracks nullability.
      ((Realizable: Realizability) /: tp.fields)(checkField)
    else
      Realizable
  }
}
diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala
new file mode 100644
index 000000000..1e623db4d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Comments.scala
@@ -0,0 +1,459 @@
+package dotty.tools
+package dotc
+package core
+
+import ast.{ untpd, tpd }
+import Decorators._, Symbols._, Contexts._, Flags.EmptyFlags
+import util.SourceFile
+import util.Positions._
+import util.CommentParsing._
+import util.Property.Key
+import parsing.Parsers.Parser
+import reporting.diagnostic.messages.ProperDefinitionNotFound
+
+object Comments {
+ val ContextDoc = new Key[ContextDocstrings]
+
  /** Decorator for getting docbase out of context */
  implicit class CommentsContext(val ctx: Context) extends AnyVal {
    /** The doc context stored under the `ContextDoc` property key, if present. */
    def docCtx: Option[ContextDocstrings] = ctx.property(ContextDoc)
  }
+
+ /** Context for Docstrings, contains basic functionality for getting
+ * docstrings via `Symbol` and expanding templates
+ */
+ class ContextDocstrings {
+ import scala.collection.mutable
+
+ private[this] val _docstrings: mutable.Map[Symbol, Comment] =
+ mutable.Map.empty
+
+ val templateExpander = new CommentExpander
+
+ def docstrings: Map[Symbol, Comment] = _docstrings.toMap
+
+ def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym)
+
+ def addDocstring(sym: Symbol, doc: Option[Comment]): Unit =
+ doc.map(d => _docstrings += (sym -> d))
+ }
+
  /** A `Comment` contains the unformatted docstring as well as a position
   *
   *  The `Comment` contains functionality to create versions of itself without
   *  `@usecase` sections as well as functionality to map the `raw` docstring
   */
  abstract case class Comment(pos: Position, raw: String) { self =>
    /** Has this comment's template already been expanded? */
    def isExpanded: Boolean

    /** The `@usecase`s extracted from this comment (empty until parsed). */
    def usecases: List[UseCase]

    // A doc comment proper starts with "/**"; anything else is a plain comment.
    val isDocComment = raw.startsWith("/**")

    /** A copy of this comment with `f` applied to the raw text, marked expanded. */
    def expand(f: String => String): Comment = new Comment(pos, f(raw)) {
      val isExpanded = true
      val usecases = self.usecases
    }

    /** A copy with `@usecase`/`@define` sections stripped from the text and
     *  the usecases parsed into `UseCase` values.
     */
    def withUsecases(implicit ctx: Context): Comment = new Comment(pos, stripUsecases) {
      val isExpanded = self.isExpanded
      val usecases = parseUsecases
    }

    private[this] lazy val stripUsecases: String =
      removeSections(raw, "@usecase", "@define")

    private[this] def parseUsecases(implicit ctx: Context): List[UseCase] =
      if (!raw.startsWith("/**"))
        List.empty[UseCase]
      else
        tagIndex(raw)
          .filter { startsWithTag(raw, _, "@usecase") }
          .map { case (start, end) => decomposeUseCase(start, end) }

    /** Turns a usecase section into a UseCase, with code changed to:
     *  {{{
     *  // From:
     *  def foo: A
     *  // To:
     *  def foo: A = ???
     *  }}}
     */
    private[this] def decomposeUseCase(start: Int, end: Int)(implicit ctx: Context): UseCase = {
      // Translate offsets within `raw` into positions within the source file.
      // NOTE(review): `end1` is offset from `pos.end` while `start1` is offset
      // from `pos.start` — the asymmetry looks suspicious; confirm intended.
      def subPos(start: Int, end: Int) =
        if (pos == NoPosition) NoPosition
        else {
          val start1 = pos.start + start
          val end1 = pos.end + end
          pos withStart start1 withPoint start1 withEnd end1
        }

      val codeStart = skipWhitespace(raw, start + "@usecase".length)
      val codeEnd = skipToEol(raw, codeStart)
      // Append a dummy rhs so the snippet parses as a definition.
      val code = raw.substring(codeStart, codeEnd) + " = ???"
      val codePos = subPos(codeStart, codeEnd)
      // The usecase's own comment is everything after the code line, re-wrapped.
      val commentStart = skipLineLead(raw, codeEnd + 1) min end
      val commentStr = "/** " + raw.substring(commentStart, end) + "*/"
      val commentPos = subPos(commentStart, end)

      UseCase(Comment(commentPos, commentStr), code, codePos)
    }
  }
+
  object Comment {
    /** Create a concrete `Comment` with the given expansion state and usecases. */
    def apply(pos: Position, raw: String, expanded: Boolean = false, usc: List[UseCase] = Nil)(implicit ctx: Context): Comment =
      new Comment(pos, raw) {
        val isExpanded = expanded
        val usecases = usc
      }
  }
+
  /** A `@usecase` extracted from a doc comment: its own comment, the source
   *  `code` of the synthetic definition, and that code's position.
   */
  abstract case class UseCase(comment: Comment, code: String, codePos: Position) {
    /** Set by typer */
    var tpdCode: tpd.DefDef = _

    /** The untyped tree parsed from `code` (see the companion's `apply`). */
    def untpdCode: untpd.Tree
  }
+
  object UseCase {
    /** Create a `UseCase`, parsing `code` into its untyped tree.
     *  Only a definition is accepted; anything else is reported via
     *  `ProperDefinitionNotFound` and the parsed tree kept as-is.
     */
    def apply(comment: Comment, code: String, codePos: Position)(implicit ctx: Context) =
      new UseCase(comment, code, codePos) {
        val untpdCode = {
          val tree = new Parser(new SourceFile("<usecase>", code)).localDef(codePos.start, EmptyFlags)

          tree match {
            case tree: untpd.DefDef =>
              // Mangle the name with the position so the synthetic def
              // cannot clash with the real definition it documents.
              val newName = (tree.name.show + "$" + codePos + "$doc").toTermName
              untpd.DefDef(newName, tree.tparams, tree.vparamss, tree.tpt, tree.rhs)
            case _ =>
              ctx.error(ProperDefinitionNotFound(), codePos)
              tree
          }
        }
      }
  }
+
+ /**
+ * Port of DocComment.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+ class CommentExpander {
+ import dotc.config.Printers.dottydoc
+ import scala.collection.mutable
+
    /** The expanded doc comment of `sym` as seen from `site`: defines the
     *  template variables of the site (or of `sym` itself when no site is
     *  given), then expands the symbol's comment against them.
     */
    def expand(sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
      val parent = if (site != NoSymbol) site else sym
      defineVariables(parent)
      expandedDocComment(sym, parent)
    }
+
    /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
     *
     *  @param sym The symbol for which doc comment is returned
     *  @param site The class for which doc comments are generated
     *  @param docStr An explicit raw comment to use instead of the registered one
     *  @throws ExpansionLimitExceeded when more than 10 successive expansions
     *                                 of the same string are done, which is
     *                                 interpreted as a recursive variable definition.
     */
    def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(implicit ctx: Context): String = {
      // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
      val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym
                   else site
      expandVariables(cookedDocComment(sym, docStr), sym, parent)
    }
+
    /** `raw` with its `@define` sections stripped, leaving the expandable body. */
    private def template(raw: String): String =
      removeSections(raw, "@define")
+
+ private def defines(raw: String): List[String] = {
+ val sections = tagIndex(raw)
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ defines map { case (start, end) => raw.substring(start, end) }
+ }
+
+ private def replaceInheritDocToInheritdoc(docStr: String): String =
+ docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")
+
    /** The cooked doc comment of an overridden symbol: walks the inherited
     *  overridden symbols in order and returns the first non-empty cooked
     *  comment, if any.
     */
    protected def superComment(sym: Symbol)(implicit ctx: Context): Option[String] =
      allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "")
+
    // Cache of cooked comments, so each symbol's comment is assembled only once.
    private val cookedDocComments = mutable.HashMap[Symbol, String]()

    /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
     *  missing sections of an inherited doc comment.
     *  If a symbol does not have a doc comment but some overridden version of it does,
     *  the doc comment of the overridden version is copied instead.
     */
    def cookedDocComment(sym: Symbol, docStr: String = "")(implicit ctx: Context): String = cookedDocComments.getOrElseUpdate(sym, {
      // Prefer the explicit `docStr`; otherwise look the comment up in the doc context.
      var ownComment =
        if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("")
        else template(docStr)
      ownComment = replaceInheritDocToInheritdoc(ownComment)

      superComment(sym) match {
        case None =>
          // SI-8210 - The warning would be false negative when this symbol is a setter
          if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter)
            dottydoc.println(s"${sym.pos}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
          ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
        case Some(sc) =>
          // Merge the inherited comment into our own, then expand @inheritdoc tags.
          if (ownComment == "") sc
          else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
      }
    })
+
+ private def isMovable(str: String, sec: (Int, Int)): Boolean =
+ startsWithTag(str, sec, "@param") ||
+ startsWithTag(str, sec, "@tparam") ||
+ startsWithTag(str, sec, "@return")
+
    /** Merge the inherited comment `src` into the child's comment `dst`,
     *  copying movable sections (`@param`/`@tparam`/`@return`) that `dst`
     *  lacks. NOTE: the section-copying loops are currently disabled (see
     *  the TODO below), so only the `copyFirstPara` path adds text from `src`.
     */
    def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
      val srcSections = tagIndex(src)
      val dstSections = tagIndex(dst)
      // The four param maps below are consumed only by the disabled TODO block.
      val srcParams = paramDocs(src, "@param", srcSections)
      val dstParams = paramDocs(dst, "@param", dstSections)
      val srcTParams = paramDocs(src, "@tparam", srcSections)
      val dstTParams = paramDocs(dst, "@tparam", dstSections)
      val out = new StringBuilder
      var copied = 0
      var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))

      if (copyFirstPara) {
        val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
          (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
        out append src.substring(0, eop).trim
        // NOTE(review): 3 appears to skip the leading "/**" (cf. mainComment
        // in expandInheritdoc) — confirm.
        copied = 3
        tocopy = 3
      }

      // Copy a section from src when dst has no corresponding section;
      // when dst has one, just extend the copy window past it.
      def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
        case Some((start, end)) =>
          if (end > tocopy) tocopy = end
        case None =>
          srcSec match {
            case Some((start1, end1)) => {
              out append dst.substring(copied, tocopy).trim
              out append "\n"
              copied = tocopy
              out append src.substring(start1, end1).trim
            }
            case None =>
          }
      }

      //TODO: enable this once you know how to get `sym.paramss`
      /*
      for (params <- sym.paramss; param <- params)
        mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
      for (tparam <- sym.typeParams)
        mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)

      mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
      mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
      */

      if (out.length == 0) dst
      else {
        out append dst.substring(copied)
        out.toString
      }
    }
+
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
+ */
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
+ else {
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ // Named sections of the parent, grouped per tag kind, so a child's
+ // "@param x @inheritdoc" can look up the parent's "@param x" text.
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
+
+ val out = new StringBuilder
+
+ // Replace every "@inheritdoc" occurrence inside a child section by the
+ // (lazily computed) matching parent section text.
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
+
+ def getParentSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ dottydoc.println(s"""${sym.pos}: the """" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.")
+ "<invalid inheritdoc annotation>"
+ }
+
+ // Probe the first 7 characters of the section to classify it:
+ // "@param " (note the trailing space), "@tparam" and "@throws" are all
+ // exactly 7 characters long, so one substring covers all three.
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
+ }
+ }
+
+ // Body of the comment before the first tag, without the leading "/**".
+ // NOTE(review): `startTag` is computed on the untrimmed `str` while the
+ // substring is taken from `str.trim` — confirm the indices stay aligned
+ // for comments with leading whitespace.
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
+
+ // Append sections
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
+
+ out.append("*/")
+ out.toString
+ }
+
+ /** Expand `$name` variable references in `initialStr`.
+ * Definitions are resolved via `lookupVariable` starting from `site`;
+ * the special variable `$super` splices in the super comment's main body
+ * plus its non-movable sections. Expansion is re-applied to the result
+ * until it reaches a fixed point, at most `expandLimit` times. Escaped
+ * occurrences written as `\$` are skipped during recursion and rewritten
+ * to plain `$` at the end.
+ *
+ * @param initialStr the raw doc comment text
+ * @param sym the symbol the comment is attached to (for `$super` and diagnostics)
+ * @param site the class from which variable definitions are looked up
+ * @throws ExpansionLimitExceeded when nesting exceeds `expandLimit`
+ */
+ protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val expandLimit = 10
+
+ def expandInternal(str: String, depth: Int): String = {
+ if (depth >= expandLimit)
+ throw new ExpansionLimitExceeded(str)
+
+ val out = new StringBuilder
+ var copied, idx = 0
+ // excluding variables written as \$foo so we can use them when
+ // necessary to document things like Symbol#decode
+ def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\'
+ while (idx < str.length) {
+ if ((str charAt idx) != '$' || isEscaped)
+ idx += 1
+ else {
+ val vstart = idx
+ idx = skipVariable(str, idx + 1)
+ // Flush the text before the variable, then emit its replacement.
+ def replaceWith(repl: String) = {
+ out append str.substring(copied, vstart)
+ out append repl
+ copied = idx
+ }
+ variableName(str.substring(vstart + 1, idx)) match {
+ case "super" =>
+ superComment(sym) foreach { sc =>
+ val superSections = tagIndex(sc)
+ // Main body of the super comment (skipping the "/**" prefix) ...
+ replaceWith(sc.substring(3, startTag(sc, superSections)))
+ // ... plus any sections that must not be moved by the merge.
+ for (sec @ (start, end) <- superSections)
+ if (!isMovable(sc, sec)) out append sc.substring(start, end)
+ }
+ case "" => idx += 1
+ case vname =>
+ lookupVariable(vname, site) match {
+ case Some(replacement) => replaceWith(replacement)
+ case None =>
+ dottydoc.println(s"Variable $vname undefined in comment for $sym in $site")
+ }
+ }
+ }
+ }
+ // No variable was replaced: fixed point reached, stop recursing.
+ if (out.length == 0) str
+ else {
+ out append str.substring(copied)
+ expandInternal(out.toString, depth + 1)
+ }
+ }
+
+ // We suppressed expanding \$ throughout the recursion, and now we
+ // need to replace \$ with $ so it looks as intended.
+ expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$")
+ }
+
+ /** Parse all `@define key value` occurrences in `sym`'s raw doc comment
+ * and record the resulting variable -> replacement pairs in `defs(sym)`.
+ * Trailing comment-decoration `*`s are stripped from each value.
+ */
+ def defineVariables(sym: Symbol)(implicit ctx: Context) = {
+ // Captures the value with leading horizontal whitespace (spaces/tabs, but
+ // not newlines) and trailing whitespace removed.
+ val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r
+
+ val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("")
+ defs(sym) ++= defines(raw).map {
+ str => {
+ // Split each "@define key value" string into the key and the rest.
+ val start = skipWhitespace(str, "@define".length)
+ val (key, value) = str.splitAt(skipVariable(str, start))
+ key.drop(start) -> value
+ }
+ } map {
+ case (key, Trim(value)) =>
+ variableName(key) -> value.replaceAll("\\s+\\*+$", "")
+ }
+ }
+
+ /** Maps symbols to the variable -> replacement maps that are defined
+ * in their doc comments. Populated by `defineVariables`; the default
+ * value means symbols without definitions yield an empty map.
+ */
+ private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()
+
+ /** Lookup definition of variable.
+ *
+ * Searches the definitions of `site` (and, for modules, the module itself)
+ * followed by its base classes; if nothing is found there, the search
+ * recurses into `site.owner`.
+ *
+ * @param vble The variable for which a definition is searched
+ * @param site The class for which doc comments are generated
+ */
+ def lookupVariable(vble: String, site: Symbol)(implicit ctx: Context): Option[String] = site match {
+ case NoSymbol => None
+ case _ =>
+ val searchList =
+ if (site.flags.is(Flags.Module)) site :: site.info.baseClasses
+ else site.info.baseClasses
+
+ searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
+ // A replacement that itself starts with "$" redirects to another variable.
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
+ }
+ }
+
+ /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the position of the doc comment of the overridden version is returned instead.
+ * NOTE(review): the overridden-version fallback described above is not
+ * implemented here — only `sym`'s own docstring is consulted; confirm
+ * whether the fallback happens in `docstring` or the doc is stale.
+ */
+ def docCommentPos(sym: Symbol)(implicit ctx: Context): Position =
+ ctx.docCtx.flatMap(_.docstring(sym).map(_.pos)).getOrElse(NoPosition)
+
+ /** A version which doesn't consider self types, as a temporary measure:
+ * an infinite loop has broken out between superComment and cookedDocComment
+ * since r23926.
+ *
+ * @return all overridden symbols of `sym`, or Nil when `sym` is not
+ * owned by a class
+ */
+ private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = {
+ if (!sym.owner.isClass) Nil
+ else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..`
+ //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
+ }
+
+ /** Thrown by `expandVariables` when variable expansion recurses deeper
+ * than the expansion limit (e.g. due to cyclic `@define`s).
+ */
+ class ExpansionLimitExceeded(str: String) extends Exception
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala
new file mode 100644
index 000000000..1892e4bdc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Constants.scala
@@ -0,0 +1,235 @@
+package dotty.tools.dotc
+package core
+
+import Types._, Symbols._, Contexts._
+import printing.Printer
+
+object Constants {
+
+ // Tags classifying the runtime class of a Constant's `value`.
+ // The numeric tags ByteTag..DoubleTag form a widening sequence; the
+ // isXRange/isNumeric range tests in `Constant` rely on this ordering.
+ final val NoTag = 0
+ final val UnitTag = 1
+ final val BooleanTag = 2
+ final val ByteTag = 3
+ final val ShortTag = 4
+ final val CharTag = 5
+ final val IntTag = 6
+ final val LongTag = 7
+ final val FloatTag = 8
+ final val DoubleTag = 9
+ final val StringTag = 10
+ final val NullTag = 11
+ final val ClazzTag = 12
+ // For supporting java enumerations inside java annotations (see ClassfileParser)
+ final val EnumTag = 13
+
+ /** A compile-time constant together with a tag describing its kind.
+ * Unsupported values are rejected eagerly at construction.
+ */
+ case class Constant(value: Any) extends printing.Showable {
+ import java.lang.Double.doubleToRawLongBits
+ import java.lang.Float.floatToRawIntBits
+
+ val tag: Int = value match {
+ case null => NullTag
+ case x: Unit => UnitTag
+ case x: Boolean => BooleanTag
+ case x: Byte => ByteTag
+ case x: Short => ShortTag
+ case x: Int => IntTag
+ case x: Long => LongTag
+ case x: Float => FloatTag
+ case x: Double => DoubleTag
+ case x: String => StringTag
+ case x: Char => CharTag
+ case x: Type => ClazzTag
+ case x: Symbol => EnumTag
+ case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass)
+ }
+
+ // Range tests used by convertTo below; they exploit the tag ordering.
+ def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue
+ def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue
+ def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue
+ def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag
+ def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag
+ def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag
+ def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
+ def isNonUnitAnyVal = BooleanTag <= tag && tag <= DoubleTag
+ def isAnyVal = UnitTag <= tag && tag <= DoubleTag
+
+ /** The Dotty type corresponding to this constant's tag. */
+ def tpe(implicit ctx: Context): Type = tag match {
+ case UnitTag => defn.UnitType
+ case BooleanTag => defn.BooleanType
+ case ByteTag => defn.ByteType
+ case ShortTag => defn.ShortType
+ case CharTag => defn.CharType
+ case IntTag => defn.IntType
+ case LongTag => defn.LongType
+ case FloatTag => defn.FloatType
+ case DoubleTag => defn.DoubleType
+ case StringTag => defn.StringType
+ case NullTag => defn.NullType
+ case ClazzTag => defn.ClassType(typeValue)
+ case EnumTag => defn.EnumType(symbolValue)
+ }
+
+ /** We need the equals method to take account of tags as well as values.
+ */
+ override def equals(other: Any): Boolean = other match {
+ case that: Constant =>
+ this.tag == that.tag && equalHashValue == that.equalHashValue
+ case _ => false
+ }
+
+ def isNaN = value match {
+ case f: Float => f.isNaN
+ case d: Double => d.isNaN
+ case _ => false
+ }
+
+ def booleanValue: Boolean =
+ if (tag == BooleanTag) value.asInstanceOf[Boolean]
+ else throw new Error("value " + value + " is not a boolean")
+
+ // The numeric accessors below widen/narrow any numeric constant to the
+ // requested primitive, mirroring JVM primitive conversions; they throw
+ // for non-numeric tags.
+ def byteValue: Byte = tag match {
+ case ByteTag => value.asInstanceOf[Byte]
+ case ShortTag => value.asInstanceOf[Short].toByte
+ case CharTag => value.asInstanceOf[Char].toByte
+ case IntTag => value.asInstanceOf[Int].toByte
+ case LongTag => value.asInstanceOf[Long].toByte
+ case FloatTag => value.asInstanceOf[Float].toByte
+ case DoubleTag => value.asInstanceOf[Double].toByte
+ case _ => throw new Error("value " + value + " is not a Byte")
+ }
+
+ def shortValue: Short = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toShort
+ case ShortTag => value.asInstanceOf[Short]
+ case CharTag => value.asInstanceOf[Char].toShort
+ case IntTag => value.asInstanceOf[Int].toShort
+ case LongTag => value.asInstanceOf[Long].toShort
+ case FloatTag => value.asInstanceOf[Float].toShort
+ case DoubleTag => value.asInstanceOf[Double].toShort
+ case _ => throw new Error("value " + value + " is not a Short")
+ }
+
+ def charValue: Char = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toChar
+ case ShortTag => value.asInstanceOf[Short].toChar
+ case CharTag => value.asInstanceOf[Char]
+ case IntTag => value.asInstanceOf[Int].toChar
+ case LongTag => value.asInstanceOf[Long].toChar
+ case FloatTag => value.asInstanceOf[Float].toChar
+ case DoubleTag => value.asInstanceOf[Double].toChar
+ case _ => throw new Error("value " + value + " is not a Char")
+ }
+
+ def intValue: Int = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toInt
+ case ShortTag => value.asInstanceOf[Short].toInt
+ case CharTag => value.asInstanceOf[Char].toInt
+ case IntTag => value.asInstanceOf[Int]
+ case LongTag => value.asInstanceOf[Long].toInt
+ case FloatTag => value.asInstanceOf[Float].toInt
+ case DoubleTag => value.asInstanceOf[Double].toInt
+ case _ => throw new Error("value " + value + " is not an Int")
+ }
+
+ def longValue: Long = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toLong
+ case ShortTag => value.asInstanceOf[Short].toLong
+ case CharTag => value.asInstanceOf[Char].toLong
+ case IntTag => value.asInstanceOf[Int].toLong
+ case LongTag => value.asInstanceOf[Long]
+ case FloatTag => value.asInstanceOf[Float].toLong
+ case DoubleTag => value.asInstanceOf[Double].toLong
+ case _ => throw new Error("value " + value + " is not a Long")
+ }
+
+ def floatValue: Float = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toFloat
+ case ShortTag => value.asInstanceOf[Short].toFloat
+ case CharTag => value.asInstanceOf[Char].toFloat
+ case IntTag => value.asInstanceOf[Int].toFloat
+ case LongTag => value.asInstanceOf[Long].toFloat
+ case FloatTag => value.asInstanceOf[Float]
+ case DoubleTag => value.asInstanceOf[Double].toFloat
+ case _ => throw new Error("value " + value + " is not a Float")
+ }
+
+ def doubleValue: Double = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toDouble
+ case ShortTag => value.asInstanceOf[Short].toDouble
+ case CharTag => value.asInstanceOf[Char].toDouble
+ case IntTag => value.asInstanceOf[Int].toDouble
+ case LongTag => value.asInstanceOf[Long].toDouble
+ case FloatTag => value.asInstanceOf[Float].toDouble
+ case DoubleTag => value.asInstanceOf[Double]
+ case _ => throw new Error("value " + value + " is not a Double")
+ }
+
+ /** Convert constant value to conform to given type.
+ */
+ def convertTo(pt: Type)(implicit ctx: Context): Constant = {
+ // Approximate `pt` by a class type suitable for the comparisons below,
+ // following type aliases, abstract-type lower bounds and constrained
+ // poly params.
+ def classBound(pt: Type): Type = pt.dealias.stripTypeVar match {
+ case tref: TypeRef if !tref.symbol.isClass => classBound(tref.info.bounds.lo)
+ case param: PolyParam =>
+ ctx.typerState.constraint.entry(param) match {
+ case TypeBounds(lo, hi) =>
+ if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound
+ else classBound(lo)
+ case NoType => classBound(param.binder.paramBounds(param.paramNum).lo)
+ case inst => classBound(inst)
+ }
+ case pt => pt
+ }
+ val target = classBound(pt).typeSymbol
+ if (target == tpe.typeSymbol)
+ this
+ else if ((target == defn.ByteClass) && isByteRange)
+ Constant(byteValue)
+ else if (target == defn.ShortClass && isShortRange)
+ Constant(shortValue)
+ else if (target == defn.CharClass && isCharRange)
+ Constant(charValue)
+ else if (target == defn.IntClass && isIntRange)
+ Constant(intValue)
+ else if (target == defn.LongClass && isLongRange)
+ Constant(longValue)
+ else if (target == defn.FloatClass && isFloatRange)
+ Constant(floatValue)
+ else if (target == defn.DoubleClass && isNumeric)
+ Constant(doubleValue)
+ else
+ // No value-preserving conversion applies; callers must check for null.
+ null
+ }
+
+ def stringValue: String = value.toString
+
+ def toText(printer: Printer) = printer.toText(this)
+
+ def typeValue: Type = value.asInstanceOf[Type]
+ def symbolValue: Symbol = value.asInstanceOf[Symbol]
+
+ /**
+ * Consider two `NaN`s to be identical, despite non-equality
+ * Consider -0d to be distinct from 0d, despite equality
+ *
+ * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`)
+ * to avoid treating different encodings of `NaN` as the same constant.
+ * You probably can't express different `NaN` varieties as compile time
+ * constants in regular Scala code, but it is conceivable that you could
+ * conjure them with a macro.
+ */
+ private def equalHashValue: Any = value match {
+ case f: Float => floatToRawIntBits(f)
+ case d: Double => doubleToRawLongBits(d)
+ case v => v
+ }
+
+ // Kept consistent with `equals` above: both mix in the tag and use
+ // `equalHashValue` for the payload.
+ override def hashCode: Int = {
+ import scala.util.hashing.MurmurHash3._
+ val seed = 17
+ var h = seed
+ h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
+ h = mix(h, equalHashValue.##)
+ finalizeHash(h, length = 2)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala
new file mode 100644
index 000000000..c99b748b7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala
@@ -0,0 +1,154 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._
+import util.SimpleMap
+import collection.mutable
+import printing.{Printer, Showable}
+import printing.Texts._
+import config.Config
+import config.Printers.constr
+
+/** Constraint over undetermined type parameters. Constraints are built
+ * over values of the following types:
+ *
+ * - PolyType A constraint constrains the type parameters of a set of PolyTypes
+ * - PolyParam The parameters of the constrained polytypes
+ * - TypeVar Every constrained parameter might be associated with a TypeVar
+ * that has the PolyParam as origin.
+ */
+abstract class Constraint extends Showable {
+
+ type This <: Constraint
+
+ /** Does the constraint's domain contain the type parameters of `pt`? */
+ def contains(pt: PolyType): Boolean
+
+ /** Does the constraint's domain contain the type parameter `param`? */
+ def contains(param: PolyParam): Boolean
+
+ /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */
+ def contains(tvar: TypeVar): Boolean
+
+ /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of
+ * the constraint domain. Note: Low level, implementation dependent.
+ */
+ def entry(param: PolyParam): Type
+
+ /** The type variable corresponding to parameter `param`, or
+ * NoType, if `param` is not constrained or is not paired with a type variable.
+ */
+ def typeVarOfParam(param: PolyParam): Type
+
+ /** Is it known that `param1 <:< param2`? */
+ def isLess(param1: PolyParam, param2: PolyParam): Boolean
+
+ /** The parameters that are known to be smaller wrt <: than `param` */
+ def lower(param: PolyParam): List[PolyParam]
+
+ /** The parameters that are known to be greater wrt <: than `param` */
+ def upper(param: PolyParam): List[PolyParam]
+
+ /** lower(param) \ lower(butNot) */
+ def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam]
+
+ /** upper(param) \ upper(butNot) */
+ def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam]
+
+ /** The constraint bounds for given type parameter `param`.
+ * Poly params that are known to be smaller or greater than `param`
+ * are not contained in the return bounds.
+ * @pre `param` is part of the constraint domain.
+ */
+ def nonParamBounds(param: PolyParam): TypeBounds
+
+ /** The lower bound of `param` including all known-to-be-smaller parameters */
+ def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type
+
+ /** The upper bound of `param` including all known-to-be-greater parameters */
+ def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type
+
+ /** The bounds of `param` including all known-to-be-smaller and -greater parameters */
+ def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds
+
+ /** A new constraint which is derived from this constraint by adding
+ * entries for all type parameters of `poly`.
+ * @param tvars A list of type variables associated with the params,
+ * or Nil if the constraint will just be checked for
+ * satisfiability but will not be solved to give instances of
+ * type variables.
+ */
+ def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This
+
+ /** A new constraint which is derived from this constraint by updating
+ * the entry for parameter `param` to `tp`.
+ * `tp` can be one of the following:
+ *
+ * - A TypeBounds value, indicating new constraint bounds
+ * - Another type, indicating a solution for the parameter
+ *
+ * @pre `this contains param`.
+ */
+ def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This
+
+ /** A constraint that includes the relationship `p1 <: p2`.
+ * `<:` relationships between parameters ("edges") are propagated, but
+ * non-parameter bounds are left alone.
+ */
+ def addLess(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This
+
+ /** A constraint resulting from adding p2 = p1 to this constraint, and at the same
+ * time transferring all bounds of p2 to p1
+ */
+ def unify(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This
+
+ /** A new constraint which is derived from this constraint by removing
+ * the type parameter `param` from the domain and replacing all top-level occurrences
+ * of the parameter elsewhere in the constraint by type `tp`, or a conservative
+ * approximation of it if that is needed to avoid cycles.
+ * Occurrences nested inside a refinement or prefix are not affected.
+ */
+ def replace(param: PolyParam, tp: Type)(implicit ctx: Context): This
+
+ /** Narrow one of the bounds of type parameter `param`
+ * If `isUpper` is true, ensure that `param <: bound`, otherwise ensure
+ * that `param >: bound`.
+ */
+ def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This
+
+ /** Is entry associated with `pt` removable? This is the case if
+ * all type parameters of the entry are associated with type variables
+ * which have their `inst` fields set.
+ */
+ def isRemovable(pt: PolyType): Boolean
+
+ /** A new constraint with all entries coming from `pt` removed. */
+ def remove(pt: PolyType)(implicit ctx: Context): This
+
+ /** The polytypes constrained by this constraint */
+ def domainPolys: List[PolyType]
+
+ /** The polytype parameters constrained by this constraint */
+ def domainParams: List[PolyParam]
+
+ /** Check whether predicate holds for all parameters in constraint */
+ def forallParams(p: PolyParam => Boolean): Boolean
+
+ /** Perform operation `op` on all typevars known to this constraint. */
+ def foreachTypeVar(op: TypeVar => Unit): Unit
+
+ /** The uninstantiated typevars of this constraint */
+ def uninstVars: collection.Seq[TypeVar]
+
+ /** The weakest constraint that subsumes both this constraint and `other` */
+ def & (other: Constraint)(implicit ctx: Context): Constraint
+
+ /** Check that no constrained parameter contains itself as a bound */
+ def checkNonCyclic()(implicit ctx: Context): Unit
+
+ /** Check that constraint only refers to PolyParams bound by itself */
+ def checkClosed()(implicit ctx: Context): Unit
+}
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
new file mode 100644
index 000000000..0e155b9e1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -0,0 +1,458 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._
+import Decorators._
+import config.Config
+import config.Printers.{constr, typr}
+import TypeApplications.EtaExpansion
+import collection.mutable
+
+/** Methods for adding constraints and solving them.
+ *
+ * What goes into a Constraint as opposed to a ConstraintHandling?
+ *
+ * Constraint code is purely functional: Operations get constraints and produce new ones.
+ * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
+ * elsewhere.
+ *
+ * By comparison: Constraint handlers are parts of type comparers and can use their functionality.
+ * Constraint handlers update the current constraint as a side effect.
+ */
+trait ConstraintHandling {
+
+ implicit val ctx: Context
+
+ protected def isSubType(tp1: Type, tp2: Type): Boolean
+ protected def isSameType(tp1: Type, tp2: Type): Boolean
+
+ val state: TyperState
+ import state.constraint
+
+ private var addConstraintInvocations = 0
+
+ /** If the constraint is frozen we cannot add new bounds to the constraint. */
+ protected var frozenConstraint = false
+
+ protected var alwaysFluid = false
+
+ /** Perform `op` in a mode where all attempts to set `frozen` to true are ignored */
+ def fluidly[T](op: => T): T = {
+ val saved = alwaysFluid
+ alwaysFluid = true
+ // Restore the previous fluidity even if `op` throws.
+ try op finally alwaysFluid = saved
+ }
+
+ /** The polytypes currently being compared. Used as a flag for
+ * optimization: when empty, no need to do an expensive `pruneLambdaParams`
+ */
+ protected var comparedPolyTypes: Set[PolyType] = Set.empty
+
+ /** Narrow one bound of `param` by `bound` (the upper bound if `isUpper`,
+ * else the lower bound) and re-check that the param's updated bounds are
+ * still satisfiable (`lo <: hi`).
+ * @return true on success; also true (trivially) when `param` is not part
+ * of the current constraint, or when narrowing was a no-op.
+ */
+ private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean =
+ !constraint.contains(param) || {
+ // Does `param` occur (through aliases, And/Or types and typevars) in `bound`?
+ def occursIn(bound: Type): Boolean = {
+ val b = bound.dealias
+ (b eq param) || {
+ b match {
+ case b: AndOrType => occursIn(b.tp1) || occursIn(b.tp2)
+ case b: TypeVar => occursIn(b.origin)
+ case _ => false
+ }
+ }
+ }
+ // Self-occurrence is only asserted against under this config flag.
+ if (Config.checkConstraintsSeparated)
+ assert(!occursIn(bound), s"$param occurs in $bound")
+ val c1 = constraint.narrowBound(param, bound, isUpper)
+ (c1 eq constraint) || {
+ constraint = c1
+ val TypeBounds(lo, hi) = constraint.entry(param)
+ isSubType(lo, hi)
+ }
+ }
+
+ /** Add the constraint `param <: bound`, also propagating the new upper
+ * bound to every parameter known to be lower than `param`.
+ * Logs (or asserts, under `Config.failOnInstantiationToNothing`) when a
+ * param is bounded by Nothing in a globally committable typer state.
+ * @return true iff all bound additions succeeded
+ */
+ protected def addUpperBound(param: PolyParam, bound: Type): Boolean = {
+ def description = i"constraint $param <: $bound to\n$constraint"
+ if (bound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable) {
+ def msg = s"!!! instantiated to Nothing: $param, constraint = ${constraint.show}"
+ if (Config.failOnInstantiationToNothing) assert(false, msg)
+ else ctx.log(msg)
+ }
+ constr.println(i"adding $description")
+ val lower = constraint.lower(param)
+ val res =
+ addOneBound(param, bound, isUpper = true) &&
+ lower.forall(addOneBound(_, bound, isUpper = true))
+ constr.println(i"added $description = $res")
+ res
+ }
+
+ /** Add the constraint `param >: bound`, also propagating the new lower
+ * bound to every parameter known to be greater than `param`.
+ * @return true iff all bound additions succeeded
+ */
+ protected def addLowerBound(param: PolyParam, bound: Type): Boolean = {
+ def description = i"constraint $param >: $bound to\n$constraint"
+ constr.println(i"adding $description")
+ val upper = constraint.upper(param)
+ val res =
+ addOneBound(param, bound, isUpper = false) &&
+ upper.forall(addOneBound(_, bound, isUpper = false))
+ constr.println(i"added $description = $res")
+ res
+ }
+
+ /** Record the ordering `p1 <: p2` in the current constraint.
+ * If the reverse ordering `p2 <: p1` is already known, the two parameters
+ * are unified instead. Otherwise the edge is added and the non-parameter
+ * bounds of each side are propagated across it.
+ * @return true iff the resulting constraint is still satisfiable
+ */
+ protected def addLess(p1: PolyParam, p2: PolyParam): Boolean = {
+ def description = i"ordering $p1 <: $p2 to\n$constraint"
+ val res =
+ if (constraint.isLess(p2, p1)) unify(p2, p1)
+ else {
+ // Parameters below p1 (excluding those already below p2) pick up p2's
+ // upper bound; parameters above p2 (excluding those above p1) pick up
+ // p1's lower bound.
+ val down1 = p1 :: constraint.exclusiveLower(p1, p2)
+ val up2 = p2 :: constraint.exclusiveUpper(p2, p1)
+ val lo1 = constraint.nonParamBounds(p1).lo
+ val hi2 = constraint.nonParamBounds(p2).hi
+ constr.println(i"adding $description down1 = $down1, up2 = $up2")
+ constraint = constraint.addLess(p1, p2)
+ down1.forall(addOneBound(_, hi2, isUpper = true)) &&
+ up2.forall(addOneBound(_, lo1, isUpper = false))
+ }
+ constr.println(i"added $description = $res")
+ res
+ }
+
+ /** Make p2 = p1, transfer all bounds of p2 to p1
+ * @pre less(p1)(p2)
+ * @return true iff the merged bounds are satisfiable and all
+ * re-propagations succeeded
+ */
+ private def unify(p1: PolyParam, p2: PolyParam): Boolean = {
+ constr.println(s"unifying $p1 $p2")
+ assert(constraint.isLess(p1, p2))
+ // Parameters strictly below p2 / above p1 must be re-checked against the
+ // merged non-param bounds of p1 after the unification.
+ val down = constraint.exclusiveLower(p2, p1)
+ val up = constraint.exclusiveUpper(p1, p2)
+ constraint = constraint.unify(p1, p2)
+ val bounds = constraint.nonParamBounds(p1)
+ val lo = bounds.lo
+ val hi = bounds.hi
+ isSubType(lo, hi) &&
+ down.forall(addOneBound(_, hi, isUpper = true)) &&
+ up.forall(addOneBound(_, lo, isUpper = false))
+ }
+
+ /** Test `tp1 <:< tp2` without adding new bounds to the constraint
+ * (unless `alwaysFluid` is set, which disables freezing).
+ */
+ final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
+ val saved = frozenConstraint
+ frozenConstraint = !alwaysFluid
+ try isSubType(tp1, tp2)
+ finally frozenConstraint = saved
+ }
+
+ /** Test `tp1 =:= tp2` without adding new bounds to the constraint
+ * (unless `alwaysFluid` is set, which disables freezing).
+ */
+ final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
+ val saved = frozenConstraint
+ frozenConstraint = !alwaysFluid
+ try isSameType(tp1, tp2)
+ finally frozenConstraint = saved
+ }
+
+ /** Test whether the lower bounds of all parameters in this
+ * constraint are a solution to the constraint.
+ * Failures are logged before returning false.
+ */
+ protected final def isSatisfiable: Boolean =
+ constraint.forallParams { param =>
+ val TypeBounds(lo, hi) = constraint.entry(param)
+ isSubType(lo, hi) || {
+ ctx.log(i"sub fail $lo <:< $hi")
+ false
+ }
+ }
+
+ /** Solve constraint set for given type parameter `param`.
+ * If `fromBelow` is true the parameter is approximated by its lower bound,
+ * otherwise it is approximated by its upper bound. However, any occurrences
+ * of the parameter in a refinement somewhere in the bound are removed. Also
+ * wildcard types in bounds are approximated by their upper or lower bounds.
+ * (Such occurrences can arise for F-bounded types).
+ * The constraint is left unchanged.
+ * @return the instantiating type
+ * @pre `param` is in the constraint's domain.
+ */
+ final def approximation(param: PolyParam, fromBelow: Boolean): Type = {
+ // Type map removing `param` from refinements and resolving wildcards.
+ val avoidParam = new TypeMap {
+ override def stopAtStatic = true
+ def apply(tp: Type) = mapOver {
+ tp match {
+ case tp: RefinedType if param occursIn tp.refinedInfo => tp.parent
+ case tp: WildcardType =>
+ val bounds = tp.optBounds.orElse(TypeBounds.empty).bounds
+ // Try to instantiate the wildcard to a type that is known to conform to it.
+ // This means:
+ // If fromBelow is true, we minimize the type overall
+ // Hence, if variance < 0, pick the maximal safe type: bounds.lo
+ // (i.e. the whole bounds range is over the type)
+ // if variance > 0, pick the minimal safe type: bounds.hi
+ // (i.e. the whole bounds range is under the type)
+ // if variance == 0, pick bounds.lo anyway (this is arbitrary but in line with
+ // the principle that we pick the smaller type when in doubt).
+ // If fromBelow is false, we maximize the type overall and reverse the bounds
+ // if variance != 0. For variance == 0, we still minimize.
+ // In summary we pick the bound given by this table:
+ //
+ // variance | -1 0 1
+ // ------------------------
+ // from below | lo lo hi
+ // from above | hi lo lo
+ //
+ if (variance == 0 || fromBelow == (variance < 0)) bounds.lo else bounds.hi
+ case _ => tp
+ }
+ }
+ }
+ assert(constraint.contains(param))
+ val bound = if (fromBelow) constraint.fullLowerBound(param) else constraint.fullUpperBound(param)
+ val inst = avoidParam(bound)
+ typr.println(s"approx ${param.show}, from below = $fromBelow, bound = ${bound.show}, inst = ${inst.show}")
+ inst
+ }
+
+ /** The instance type of `param` in the current constraint (which contains `param`).
+ * If `fromBelow` is true, the instance type is the lub of the parameter's
+ * lower bounds; otherwise it is the glb of its upper bounds. However,
+ * a lower bound instantiation can be a singleton type only if the upper bound
+ * is also a singleton type.
+ */
+ def instanceType(param: PolyParam, fromBelow: Boolean): Type = {
+ def upperBound = constraint.fullUpperBound(param)
+ // An AndType is singleton if either operand is; an OrType only if both are.
+ def isSingleton(tp: Type): Boolean = tp match {
+ case tp: SingletonType => true
+ case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2)
+ case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2)
+ case _ => false
+ }
+ def isFullyDefined(tp: Type): Boolean = tp match {
+ case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
+ case tp: TypeProxy => isFullyDefined(tp.underlying)
+ case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
+ case _ => true
+ }
+ // Does `tp` contain an Or type, looking through typevars, aliases,
+ // refinements, recursive types and wildcard bounds?
+ def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match {
+ case tp: OrType => true
+ case tp: RefinedOrRecType => isOrType(tp.parent)
+ case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
+ case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
+ case _ => false
+ }
+
+ // First, solve the constraint.
+ var inst = approximation(param, fromBelow)
+
+ // Then, approximate by (1.) - (3.) and simplify as follows.
+ // 1. If instance is from below and is a singleton type, yet
+ // upper bound is not a singleton type, widen the instance.
+ if (fromBelow && isSingleton(inst) && !isSingleton(upperBound))
+ inst = inst.widen
+
+ inst = inst.simplified
+
+ // 2. If instance is from below and is a fully-defined union type, yet upper bound
+ // is not a union type, approximate the union type from above by an intersection
+ // of all common base types.
+ if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound))
+ inst = ctx.harmonizeUnion(inst)
+
+ // 3. If instance is from below, and upper bound has open named parameters
+ // make sure the instance has all named parameters of the bound.
+ if (fromBelow) inst = inst.widenToNamedTypeParams(param.namedTypeParams)
+ inst
+ }
+
+ /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have
+ * for all poly params `p` defined in `c2` as `p >: L2 <: U2`:
+ *
+ * c1 defines p with bounds p >: L1 <: U1, and
+ * L2 <: L1, and
+ * U1 <: U2
+ *
+ * Both `c1` and `c2` are required to derive from constraint `pre`, possibly
+ * narrowing it with further bounds.
+ */
+ protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint): Boolean =
+ if (c2 eq pre) true
+ else if (c1 eq pre) false
+ else {
+ // Save/restore the current constraint so the frozen subtype checks
+ // below cannot leak state into it.
+ val saved = constraint
+ try
+ c2.forallParams(p =>
+ c1.contains(p) &&
+ c2.upper(p).forall(c1.isLess(p, _)) &&
+ isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)))
+ finally constraint = saved
+ }
+
+ /** The current bounds of type parameter `param`: taken from the constraint
+ * entry when one exists, else from the parameter's declared bounds.
+ */
+ final def bounds(param: PolyParam): TypeBounds = {
+ val e = constraint.entry(param)
+ if (e.exists) e.bounds else param.binder.paramBounds(param.paramNum)
+ }
+
+ /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
+ * and propagate all bounds.
+ * The whole initialization runs inside an assert: propagating the declared
+ * bounds between the new parameters must succeed.
+ * @param tvars See Constraint#add
+ */
+ def addToConstraint(pt: PolyType, tvars: List[TypeVar]): Unit =
+ assert {
+ checkPropagated(i"initialized $pt") {
+ constraint = constraint.add(pt, tvars)
+ pt.paramNames.indices.forall { i =>
+ val param = PolyParam(pt, i)
+ val bounds = constraint.nonParamBounds(param)
+ val lower = constraint.lower(param)
+ val upper = constraint.upper(param)
+ // Ordering edges with non-trivial bounds right after `add` are unexpected.
+ if (lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) ||
+ upper.nonEmpty && !bounds.hi.isRef(defn.AnyClass)) constr.println(i"INIT*** $pt")
+ lower.forall(addOneBound(_, bounds.hi, isUpper = true)) &&
+ upper.forall(addOneBound(_, bounds.lo, isUpper = false))
+ }
+ }
+ }
+
+ /** Can `param` be constrained with new bounds? */
+ final def canConstrain(param: PolyParam): Boolean =
+ !frozenConstraint && (constraint contains param)
+
+ /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise.
+ * `bound` is assumed to be in normalized form, as specified in `firstTry` and
+ * `secondTry` of `TypeComparer`. In particular, it should not be an alias type,
+ * lazy ref, typevar, wildcard type, error type. In addition, upper bounds may
+ * not be AndTypes and lower bounds may not be OrTypes. This is assured by the
+ * way isSubType is organized.
+ */
+ protected def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean = {
+ def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint"
+ //checkPropagated(s"adding $description")(true) // DEBUG in case following fails
+ checkPropagated(s"added $description") {
+ addConstraintInvocations += 1
+
+ /** When comparing lambdas we might get constraints such as
+ * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter
+ * and `X0` is a lambda parameter. The constraint for `A` is not allowed
+ * to refer to such a lambda parameter because the lambda parameter is
+ * not visible where `A` is defined. Consequently, we need to
+ * approximate the bound so that the lambda parameter does not appear in it.
+ * If `tp` is an upper bound, we need to approximate with something smaller,
+ * otherwise something larger.
+ * Test case in pos/i94-nada.scala. This test crashes with an illegal instance
+ * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is
+ * missing.
+ */
+ def pruneLambdaParams(tp: Type) =
+ if (comparedPolyTypes.nonEmpty) {
+ val approx = new ApproximatingTypeMap {
+ def apply(t: Type): Type = t match {
+ case t @ PolyParam(pt: PolyType, n) if comparedPolyTypes contains pt =>
+ val effectiveVariance = if (fromBelow) -variance else variance
+ val bounds = pt.paramBounds(n)
+ if (effectiveVariance > 0) bounds.lo
+ else if (effectiveVariance < 0) bounds.hi
+ else NoType
+ case _ =>
+ mapOver(t)
+ }
+ }
+ approx(tp)
+ }
+ else tp
+
+ def addParamBound(bound: PolyParam) =
+ if (fromBelow) addLess(bound, param) else addLess(param, bound)
+
+ /** Drop all constrained parameters that occur at the toplevel in `bound` and
+ * handle them by `addLess` calls.
+ * The preconditions make sure that such parameters occur only
+ * in one of two ways:
+ *
+ * 1.
+ *
+ * P <: Ts1 | ... | Tsm (m > 0)
+ * Tsi = T1 & ... Tn (n >= 0)
+ * Some of the Ti are constrained parameters
+ *
+ * 2.
+ *
+ * Ts1 & ... & Tsm <: P (m > 0)
+ * Tsi = T1 | ... | Tn (n >= 0)
+ * Some of the Ti are constrained parameters
+ *
+ * In each case we cannot leave the parameter in place,
+ * because that would risk making a parameter later a subtype or supertype
+ * of a bound where the parameter occurs again at toplevel, which leads to cycles
+ * in the subtyping test. So we intentionally narrow the constraint by
+ * recording an isLess relationship instead (even though this is not implied
+ * by the bound).
+ *
+ * Narrowing a constraint is better than widening it, because narrowing leads
+ * to incompleteness (which we face anyway, see for instance eitherIsSubType)
+ * but widening leads to unsoundness.
+ *
+ * A test case that demonstrates the problem is i864.scala.
+ * Turn Config.checkConstraintsSeparated on to get an accurate diagnostic
+ * of the cycle when it is created.
+ *
+ * @return The pruned type if all `addLess` calls succeed, `NoType` otherwise.
+ */
+ def prune(bound: Type): Type = bound match {
+ case bound: AndOrType =>
+ val p1 = prune(bound.tp1)
+ val p2 = prune(bound.tp2)
+ if (p1.exists && p2.exists) bound.derivedAndOrType(p1, p2)
+ else NoType
+ case bound: TypeVar if constraint contains bound.origin =>
+ prune(bound.underlying)
+ case bound: PolyParam =>
+ constraint.entry(bound) match {
+ case NoType => pruneLambdaParams(bound)
+ case _: TypeBounds =>
+ if (!addParamBound(bound)) NoType
+ else if (fromBelow) defn.NothingType
+ else defn.AnyType
+ case inst =>
+ prune(inst)
+ }
+ case _ =>
+ pruneLambdaParams(bound)
+ }
+
+ try bound match {
+ case bound: PolyParam if constraint contains bound =>
+ addParamBound(bound)
+ case _ =>
+ val pbound = prune(bound)
+ pbound.exists && (
+ if (fromBelow) addLowerBound(param, pbound) else addUpperBound(param, pbound))
+ }
+ finally addConstraintInvocations -= 1
+ }
+ }
+
+ /** Instantiate `param` to `tp` if the constraint stays satisfiable */
+ protected def tryInstantiate(param: PolyParam, tp: Type): Boolean = {
+ val saved = constraint
+ constraint =
+ if (addConstraint(param, tp, fromBelow = true) &&
+ addConstraint(param, tp, fromBelow = false)) constraint.replace(param, tp)
+ else saved
+ constraint ne saved
+ }
+
+ /** Check that constraint is fully propagated. See comment in Config.checkConstraintsPropagated */
+ def checkPropagated(msg: => String)(result: Boolean): Boolean = {
+ if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) {
+ val saved = frozenConstraint
+ frozenConstraint = true
+ for (p <- constraint.domainParams) {
+ def check(cond: => Boolean, q: PolyParam, ordering: String, explanation: String): Unit =
+ assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg")
+ for (u <- constraint.upper(p))
+ check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated")
+ for (l <- constraint.lower(p)) {
+ check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated")
+ check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing")
+ }
+ }
+ frozenConstraint = saved
+ }
+ result
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
new file mode 100644
index 000000000..e0f659cc6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
@@ -0,0 +1,17 @@
+package dotty.tools.dotc
+package core
+
+import Contexts._
+import config.Printers.typr
+
+trait ConstraintRunInfo { self: RunInfo =>
+ private var maxSize = 0
+ private var maxConstraint: Constraint = _
+ def recordConstraintSize(c: Constraint, size: Int) =
+ if (size > maxSize) {
+ maxSize = size
+ maxConstraint = c
+ }
+ def printMaxConstraint()(implicit ctx: Context) =
+ if (maxSize > 0) typr.println(s"max constraint = ${maxConstraint.show}")
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala
new file mode 100644
index 000000000..639c4d111
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -0,0 +1,709 @@
+package dotty.tools
+package dotc
+package core
+
+import interfaces.CompilerCallback
+import Decorators._
+import Periods._
+import Names._
+import Phases._
+import Types._
+import Symbols._
+import Scopes._
+import NameOps._
+import Uniques._
+import SymDenotations._
+import Comments._
+import Flags.ParamAccessor
+import util.Positions._
+import ast.Trees._
+import ast.untpd
+import util.{FreshNameCreator, SimpleMap, SourceFile, NoSource}
+import typer.{Implicits, ImplicitRunInfo, ImportInfo, Inliner, NamerContextOps, SearchHistory, TypeAssigner, Typer}
+import Implicits.ContextualImplicits
+import config.Settings._
+import config.Config
+import reporting._
+import collection.mutable
+import collection.immutable.BitSet
+import printing._
+import config.{Settings, ScalaSettings, Platform, JavaPlatform}
+import language.implicitConversions
+import DenotTransformers.DenotTransformer
+import util.Property.Key
+import xsbti.AnalysisCallback
+
+object Contexts {
+
+ /** A context is passed basically everywhere in dotc.
+ * This is convenient but carries the risk of captured contexts in
+ * objects that turn into space leaks. To combat this risk, here are some
+ * conventions to follow:
+ *
+ * - Never let an implicit context be an argument of a class whose instances
+ * live longer than the context.
+ * - Classes that need contexts for their initialization take an explicit parameter
+ * named `initctx`. They pass initctx to all positions where it is needed
+ * (and these positions should all be part of the initialization sequence of the class).
+ * - Classes that need contexts that survive initialization are instead passed
+ * a "condensed context", typically named `cctx` (or they create one). Condensed contexts
+ * just add some basic information to the context base without the
+ * risk of capturing complete trees.
+ * - To make sure these rules are kept, it would be good to do a sanity
+ * check using bytecode inspection with javap or scalap: Keep track
+ * of all class fields of type context; allow them only in whitelisted
+ * classes (which should be short-lived).
+ */
+ abstract class Context extends Periods
+ with Substituters
+ with TypeOps
+ with Phases
+ with Printers
+ with Symbols
+ with SymDenotations
+ with Reporting
+ with NamerContextOps
+ with Cloneable { thiscontext =>
+ implicit def ctx: Context = this
+
+ /** The context base at the root */
+ val base: ContextBase
+
+ /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */
+ def outersIterator = new Iterator[Context] {
+ var current = thiscontext
+ def hasNext = current != NoContext
+ def next = { val c = current; current = current.outer; c }
+ }
+
+ /** The outer context */
+ private[this] var _outer: Context = _
+ protected def outer_=(outer: Context) = _outer = outer
+ def outer: Context = _outer
+
+ /** The compiler callback implementation, or null if no callback will be called. */
+ private[this] var _compilerCallback: CompilerCallback = _
+ protected def compilerCallback_=(callback: CompilerCallback) =
+ _compilerCallback = callback
+ def compilerCallback: CompilerCallback = _compilerCallback
+
+ /** The sbt callback implementation if we are run from sbt, null otherwise */
+ private[this] var _sbtCallback: AnalysisCallback = _
+ protected def sbtCallback_=(callback: AnalysisCallback) =
+ _sbtCallback = callback
+ def sbtCallback: AnalysisCallback = _sbtCallback
+
+ /** The current period */
+ private[this] var _period: Period = _
+ protected def period_=(period: Period) = {
+ assert(period.firstPhaseId == period.lastPhaseId, period)
+ _period = period
+ }
+ def period: Period = _period
+
+ /** The current mode */
+ private[this] var _mode: Mode = _
+ protected def mode_=(mode: Mode) = _mode = mode
+ def mode: Mode = _mode
+
+ /** The current typer state */
+ private[this] var _typerState: TyperState = _
+ protected def typerState_=(typerState: TyperState) = _typerState = typerState
+ def typerState: TyperState = _typerState
+
+ /** The current plain printer */
+ private[this] var _printerFn: Context => Printer = _
+ protected def printerFn_=(printerFn: Context => Printer) = _printerFn = printerFn
+ def printerFn: Context => Printer = _printerFn
+
+ /** The current owner symbol */
+ private[this] var _owner: Symbol = _
+ protected def owner_=(owner: Symbol) = _owner = owner
+ def owner: Symbol = _owner
+
+ /** The current settings values */
+ private[this] var _sstate: SettingsState = _
+ protected def sstate_=(sstate: SettingsState) = _sstate = sstate
+ def sstate: SettingsState = _sstate
+
+ /** The current compilation unit */
+ private[this] var _compilationUnit: CompilationUnit = _
+ protected def compilationUnit_=(compilationUnit: CompilationUnit) = _compilationUnit = compilationUnit
+ def compilationUnit: CompilationUnit = _compilationUnit
+
+ /** The current tree */
+ private[this] var _tree: Tree[_ >: Untyped]= _
+ protected def tree_=(tree: Tree[_ >: Untyped]) = _tree = tree
+ def tree: Tree[_ >: Untyped] = _tree
+
+ /** The current scope */
+ private[this] var _scope: Scope = _
+ protected def scope_=(scope: Scope) = _scope = scope
+ def scope: Scope = _scope
+
+ /** The current type assigner or typer */
+ private[this] var _typeAssigner: TypeAssigner = _
+ protected def typeAssigner_=(typeAssigner: TypeAssigner) = _typeAssigner = typeAssigner
+ def typeAssigner: TypeAssigner = _typeAssigner
+ def typer: Typer = _typeAssigner.asInstanceOf[Typer]
+
+ /** The currently active import info */
+ private[this] var _importInfo: ImportInfo = _
+ protected def importInfo_=(importInfo: ImportInfo) = _importInfo = importInfo
+ def importInfo: ImportInfo = _importInfo
+
+ /** The current compiler-run specific Info */
+ private[this] var _runInfo: RunInfo = _
+ protected def runInfo_=(runInfo: RunInfo) = _runInfo = runInfo
+ def runInfo: RunInfo = _runInfo
+
+ /** An optional diagnostics buffer that is used by some checking code
+ * to provide more information in the buffer if it exists.
+ */
+ private var _diagnostics: Option[StringBuilder] = _
+ protected def diagnostics_=(diagnostics: Option[StringBuilder]) = _diagnostics = diagnostics
+ def diagnostics: Option[StringBuilder] = _diagnostics
+
+ /** The current bounds in force for type parameters appearing in a GADT */
+ private var _gadt: GADTMap = _
+ protected def gadt_=(gadt: GADTMap) = _gadt = gadt
+ def gadt: GADTMap = _gadt
+
+ /** The current fresh name creator */
+ private[this] var _freshNames: FreshNameCreator = _
+ protected def freshNames_=(freshNames: FreshNameCreator) = _freshNames = freshNames
+ def freshNames: FreshNameCreator = _freshNames
+
+ def freshName(prefix: String = ""): String = freshNames.newName(prefix)
+ def freshName(prefix: Name): String = freshName(prefix.toString)
+
+ /** A map in which more contextual properties can be stored */
+ private var _moreProperties: Map[Key[Any], Any] = _
+ protected def moreProperties_=(moreProperties: Map[Key[Any], Any]) = _moreProperties = moreProperties
+ def moreProperties: Map[Key[Any], Any] = _moreProperties
+
+ def property[T](key: Key[T]): Option[T] =
+ moreProperties.get(key).asInstanceOf[Option[T]]
+
+ private var _typeComparer: TypeComparer = _
+ protected def typeComparer_=(typeComparer: TypeComparer) = _typeComparer = typeComparer
+ def typeComparer: TypeComparer = {
+ if (_typeComparer.ctx ne this)
+ _typeComparer = _typeComparer.copyIn(this)
+ _typeComparer
+ }
+
+ /** Number of findMember calls on stack */
+ private[core] var findMemberCount: Int = 0
+
+ /** List of names which have a findMember call on the stack,
+ * after Config.LogPendingFindMemberThreshold is reached.
+ */
+ private[core] var pendingMemberSearches: List[Name] = Nil
+
+ /** The new implicit references that are introduced by this scope */
+ private var implicitsCache: ContextualImplicits = null
+ def implicits: ContextualImplicits = {
+ if (implicitsCache == null )
+ implicitsCache = {
+ val implicitRefs: List[TermRef] =
+ if (isClassDefContext)
+ try owner.thisType.implicitMembers
+ catch {
+ case ex: CyclicReference => Nil
+ }
+ else if (isImportContext) importInfo.importedImplicits
+ else if (isNonEmptyScopeContext) scope.implicitDecls
+ else Nil
+ val outerImplicits =
+ if (isImportContext && importInfo.hiddenRoot.exists)
+ outer.implicits exclude importInfo.hiddenRoot
+ else
+ outer.implicits
+ if (implicitRefs.isEmpty) outerImplicits
+ else new ContextualImplicits(implicitRefs, outerImplicits)(this)
+ }
+ implicitsCache
+ }
+
+ /** The history of implicit searches that are currently active */
+ private var _searchHistory: SearchHistory = null
+ protected def searchHistory_= (searchHistory: SearchHistory) = _searchHistory = searchHistory
+ def searchHistory: SearchHistory = _searchHistory
+
+ /** Those fields are used to cache phases created in withPhase.
+ * phasedCtx is the first context with an altered phase that was ever requested.
+ * phasedCtxs is array that uses phaseId's as indexes,
+ * contexts are created only on request and cached in this array
+ */
+ private var phasedCtx: Context = _
+ private var phasedCtxs: Array[Context] = _
+
+ /** This context at given phase.
+ * This method will always return a phase period equal to phaseId, thus will never return squashed phases
+ */
+ final def withPhase(phaseId: PhaseId): Context =
+ if (this.phaseId == phaseId) this
+ else if (phasedCtx.phaseId == phaseId) phasedCtx
+ else if (phasedCtxs != null && phasedCtxs(phaseId) != null) phasedCtxs(phaseId)
+ else {
+ val ctx1 = fresh.setPhase(phaseId)
+ if (phasedCtx eq this) phasedCtx = ctx1
+ else {
+ if (phasedCtxs == null) phasedCtxs = new Array[Context](base.phases.length)
+ phasedCtxs(phaseId) = ctx1
+ }
+ ctx1
+ }
+
+ final def withPhase(phase: Phase): Context =
+ withPhase(phase.id)
+
+ final def withPhaseNoLater(phase: Phase) =
+ if (phase.exists && ctx.phase.id > phase.id) withPhase(phase) else ctx
+
+ /** If -Ydebug is on, the top of the stack trace where this context
+ * was created, otherwise `null`.
+ */
+ private var creationTrace: Array[StackTraceElement] = _
+
+ private def setCreationTrace() =
+ if (this.settings.YtraceContextCreation.value)
+ creationTrace = (new Throwable).getStackTrace().take(20)
+
+ /** Print all enclosing context's creation stacktraces */
+ def printCreationTraces() = {
+ println("=== context creation trace =======")
+ for (ctx <- outersIterator) {
+ println(s">>>>>>>>> $ctx")
+ if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n"))
+ }
+ println("=== end context creation trace ===")
+ }
+
+ /** The current reporter */
+ def reporter: Reporter = typerState.reporter
+
+ /** Is this a context for the members of a class definition? */
+ def isClassDefContext: Boolean =
+ owner.isClass && (owner ne outer.owner)
+
+ /** Is this a context that introduces an import clause? */
+ def isImportContext: Boolean =
+ (this ne NoContext) && (this.importInfo ne outer.importInfo)
+
+ /** Is this a context that introduces a non-empty scope? */
+ def isNonEmptyScopeContext: Boolean =
+ (this.scope ne outer.scope) && this.scope.nonEmpty
+
+ /** Leave message in diagnostics buffer if it exists */
+ def diagnose(str: => String) =
+ for (sb <- diagnostics) {
+ sb.setLength(0)
+ sb.append(str)
+ }
+
+ /** The next outer context whose tree is a template or package definition */
+ def enclTemplate: Context = {
+ var c = this
+ while (c != NoContext && !c.tree.isInstanceOf[Template[_]] && !c.tree.isInstanceOf[PackageDef[_]])
+ c = c.outer
+ c
+ }
+
+ /** The context for a supercall. This context is used for elaborating
+ * the parents of a class and their arguments.
+ * The context is computed from the current class context. It has
+ *
+ * - as owner: The primary constructor of the class
+ * - as outer context: The context enclosing the class context
+ * - as scope: The parameter accessors in the class context
+ * - with additional mode: InSuperCall
+ *
+ * The reasons for this peculiar choice of attributes are as follows:
+ *
+ * - The constructor must be the owner, because that's where any local methods or closures
+ * should go.
+ * - The context may not see any class members (inherited or defined), and should
+ * instead see definitions defined in the outer context which might be shadowed by
+ * such class members. That's why the outer context must be the outer context of the class.
+ * - At the same time the context should see the parameter accessors of the current class,
+ * that's why they get added to the local scope. An alternative would have been to have the
+ * context see the constructor parameters instead, but then we'd need a final substitution step
+ * from constructor parameters to class parameter accessors.
+ */
+ def superCallContext: Context = {
+ val locals = newScopeWith(owner.asClass.paramAccessors: _*)
+ superOrThisCallContext(owner.primaryConstructor, locals)
+ }
+
+ /** The context for the arguments of a this(...) constructor call.
+ * The context is computed from the local auxiliary constructor context.
+ * It has
+ *
+ * - as owner: The auxiliary constructor
+ * - as outer context: The context enclosing the enclosing class context
+ * - as scope: The parameters of the auxiliary constructor.
+ */
+ def thisCallArgContext: Context = {
+ assert(owner.isClassConstructor)
+ val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next
+ superOrThisCallContext(owner, constrCtx.scope)
+ .setTyperState(typerState)
+ .setGadt(gadt)
+ }
+
+ /** The super- or this-call context with given owner and locals. */
+ private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = {
+ var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next
+ classCtx.outer.fresh.setOwner(owner)
+ .setScope(locals)
+ .setMode(classCtx.mode | Mode.InSuperCall)
+ }
+
+ /** The context of expression `expr` seen as a member of a statement sequence */
+ def exprContext(stat: Tree[_ >: Untyped], exprOwner: Symbol) =
+ if (exprOwner == this.owner) this
+ else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext
+ else ctx.fresh.setOwner(exprOwner)
+
+ /** The current source file; will be derived from current
+ * compilation unit.
+ */
+ def source: SourceFile =
+ if (compilationUnit == null) NoSource else compilationUnit.source
+
+ /** Does current phase use an erased types interpretation? */
+ def erasedTypes: Boolean = phase.erasedTypes
+
+ /** Is the debug option set? */
+ def debug: Boolean = base.settings.debug.value
+
+ /** Is the verbose option set? */
+ def verbose: Boolean = base.settings.verbose.value
+
+ /** Should use colors when printing? */
+ def useColors: Boolean =
+ base.settings.color.value == "always"
+
+ /** A condensed context containing essential information of this but
+ * no outer contexts except the initial context.
+ private var _condensed: CondensedContext = null
+ def condensed: CondensedContext = {
+ if (_condensed eq outer.condensed)
+ _condensed = base.initialCtx.fresh
+ .withPeriod(period)
+ .withNewMode(mode)
+ // typerState and its constraint is not preserved in condensed
+ // reporter is always ThrowingReporter
+ .withPrinterFn(printerFn)
+ .withOwner(owner)
+ .withSettings(sstate)
+ // tree is not preserved in condensed
+ .withRunInfo(runInfo)
+ .withDiagnostics(diagnostics)
+ .withMoreProperties(moreProperties)
+ _condensed
+ }
+ */
+
+ protected def init(outer: Context): this.type = {
+ this.outer = outer
+ this.implicitsCache = null
+ this.phasedCtx = this
+ this.phasedCtxs = null
+ setCreationTrace()
+ this
+ }
+
+ /** A fresh clone of this context. */
+ def fresh: FreshContext = clone.asInstanceOf[FreshContext].init(this)
+
+ final def withOwner(owner: Symbol): Context =
+ if (owner ne this.owner) fresh.setOwner(owner) else this
+
+ override def toString =
+ "Context(\n" +
+ (outersIterator map ( ctx => s" owner = ${ctx.owner}, scope = ${ctx.scope}") mkString "\n")
+ }
+
+ /** A condensed context provides only a small memory footprint over
+ * a Context base, and therefore can be stored without problems in
+ * long-lived objects.
+ abstract class CondensedContext extends Context {
+ override def condensed = this
+ }
+ */
+
+ /** A fresh context allows selective modification
+ * of its attributes using the with... methods.
+ */
+ abstract class FreshContext extends Context {
+ def setPeriod(period: Period): this.type = { this.period = period; this }
+ def setMode(mode: Mode): this.type = { this.mode = mode; this }
+ def setCompilerCallback(callback: CompilerCallback): this.type = { this.compilerCallback = callback; this }
+ def setSbtCallback(callback: AnalysisCallback): this.type = { this.sbtCallback = callback; this }
+ def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this }
+ def setReporter(reporter: Reporter): this.type = setTyperState(typerState.withReporter(reporter))
+ def setNewTyperState: this.type = setTyperState(typerState.fresh(isCommittable = true))
+ def setExploreTyperState: this.type = setTyperState(typerState.fresh(isCommittable = false))
+ def setPrinterFn(printer: Context => Printer): this.type = { this.printerFn = printer; this }
+ def setOwner(owner: Symbol): this.type = { assert(owner != NoSymbol); this.owner = owner; this }
+ def setSettings(sstate: SettingsState): this.type = { this.sstate = sstate; this }
+ def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { this.compilationUnit = compilationUnit; this }
+ def setTree(tree: Tree[_ >: Untyped]): this.type = { this.tree = tree; this }
+ def setScope(scope: Scope): this.type = { this.scope = scope; this }
+ def setNewScope: this.type = { this.scope = newScope; this }
+ def setTypeAssigner(typeAssigner: TypeAssigner): this.type = { this.typeAssigner = typeAssigner; this }
+ def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) }
+ def setImportInfo(importInfo: ImportInfo): this.type = { this.importInfo = importInfo; this }
+ def setRunInfo(runInfo: RunInfo): this.type = { this.runInfo = runInfo; this }
+ def setDiagnostics(diagnostics: Option[StringBuilder]): this.type = { this.diagnostics = diagnostics; this }
+ def setGadt(gadt: GADTMap): this.type = { this.gadt = gadt; this }
+ def setTypeComparerFn(tcfn: Context => TypeComparer): this.type = { this.typeComparer = tcfn(this); this }
+ def setSearchHistory(searchHistory: SearchHistory): this.type = { this.searchHistory = searchHistory; this }
+ def setFreshNames(freshNames: FreshNameCreator): this.type = { this.freshNames = freshNames; this }
+ def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = { this.moreProperties = moreProperties; this }
+
+ def setProperty[T](key: Key[T], value: T): this.type =
+ setMoreProperties(moreProperties.updated(key, value))
+
+ def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid))
+ def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end))
+
+ def setSetting[T](setting: Setting[T], value: T): this.type =
+ setSettings(setting.updateIn(sstate, value))
+
+ def setFreshGADTBounds: this.type = { this.gadt = new GADTMap(gadt.bounds); this }
+
+ def setDebug = setSetting(base.settings.debug, true)
+ }
+
+ implicit class ModeChanges(val c: Context) extends AnyVal {
+ final def withModeBits(mode: Mode): Context =
+ if (mode != c.mode) c.fresh.setMode(mode) else c
+
+ final def addMode(mode: Mode): Context = withModeBits(c.mode | mode)
+ final def maskMode(mode: Mode): Context = withModeBits(c.mode & mode)
+ final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode)
+ }
+
+ implicit class FreshModeChanges(val c: FreshContext) extends AnyVal {
+ final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode)
+ final def maskMode(mode: Mode): c.type = c.setMode(c.mode & mode)
+ final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode)
+ }
+
+ /** A class defining the initial context with given context base
+ * and set of possible settings.
+ */
+ private class InitialContext(val base: ContextBase, settings: SettingGroup) extends FreshContext {
+ outer = NoContext
+ period = InitialPeriod
+ mode = Mode.None
+ typerState = new TyperState(new ConsoleReporter())
+ printerFn = new RefinedPrinter(_)
+ owner = NoSymbol
+ sstate = settings.defaultState
+ tree = untpd.EmptyTree
+ typeAssigner = TypeAssigner
+ runInfo = new RunInfo(this)
+ diagnostics = None
+ freshNames = new FreshNameCreator.Default
+ moreProperties = Map.empty
+ typeComparer = new TypeComparer(this)
+ searchHistory = new SearchHistory(0, Map())
+ gadt = new GADTMap(SimpleMap.Empty)
+ }
+
+ @sharable object NoContext extends Context {
+ val base = null
+ override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null)(this)
+ }
+
+ /** A context base defines state and associated methods that exist once per
+ * compiler run.
+ */
+ class ContextBase extends ContextState
+ with Denotations.DenotationsBase
+ with Phases.PhasesBase {
+
+ /** The applicable settings */
+ val settings = new ScalaSettings
+
+ /** The initial context */
+ val initialCtx: Context = new InitialContext(this, settings)
+
+ /** The symbol loaders */
+ val loaders = new SymbolLoaders
+
+ /** The platform, initialized by `initPlatform()`. */
+ private var _platform: Platform = _
+
+ /** The platform */
+ def platform: Platform = {
+ if (_platform == null) {
+ throw new IllegalStateException(
+ "initialize() must be called before accessing platform")
+ }
+ _platform
+ }
+
+ protected def newPlatform(implicit ctx: Context): Platform =
+ new JavaPlatform
+
+ /** The loader that loads the members of _root_ */
+ def rootLoader(root: TermSymbol)(implicit ctx: Context): SymbolLoader = platform.rootLoader(root)
+
+ // Set up some phases to get started
+ usePhases(List(SomePhase))
+
+ /** The standard definitions */
+ val definitions = new Definitions
+
+ /** Initializes the `ContextBase` with a starting context.
+ * This initializes the `platform` and the `definitions`.
+ */
+ def initialize()(implicit ctx: Context): Unit = {
+ _platform = newPlatform
+ definitions.init()
+ }
+
+ def squashed(p: Phase): Phase = {
+ allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase)
+ }
+ }
+
+ /** The essential mutable state of a context base, collected into a common class */
+ class ContextState {
+ // Symbols state
+
+ /** A counter for unique ids */
+ private[core] var _nextId = 0
+
+ def nextId = { _nextId += 1; _nextId }
+
+ /** A map from a superclass id to the typeref of the class that has it */
+ private[core] var classOfId = new Array[ClassSymbol](Config.InitialSuperIdsSize)
+
+ /** A map from the typeref of a class to its superclass id */
+ private[core] val superIdOfClass = new mutable.AnyRefMap[ClassSymbol, Int]
+
+ /** The last allocated superclass id */
+ private[core] var lastSuperId = -1
+
+ /** Allocate and return next free superclass id */
+ private[core] def nextSuperId: Int = {
+ lastSuperId += 1
+ if (lastSuperId >= classOfId.length) {
+ val tmp = new Array[ClassSymbol](classOfId.length * 2)
+ classOfId.copyToArray(tmp)
+ classOfId = tmp
+ }
+ lastSuperId
+ }
+
+ // Types state
+ /** A table for hash consing unique types */
+ private[core] val uniques = new util.HashSet[Type](Config.initialUniquesCapacity) {
+ override def hash(x: Type): Int = x.hash
+ }
+
+ /** A table for hash consing unique refined types */
+ private[dotc] val uniqueRefinedTypes = new RefinedUniques
+
+ /** A table for hash consing unique named types */
+ private[core] val uniqueNamedTypes = new NamedTypeUniques
+
+ /** A table for hash consing unique type bounds */
+ private[core] val uniqueTypeAliases = new TypeAliasUniques
+
+ private def uniqueSets = Map(
+ "uniques" -> uniques,
+ "uniqueRefinedTypes" -> uniqueRefinedTypes,
+ "uniqueNamedTypes" -> uniqueNamedTypes,
+ "uniqueTypeAliases" -> uniqueTypeAliases)
+
+ /** A map that associates label and size of all uniques sets */
+ def uniquesSizes: Map[String, Int] = uniqueSets.mapValues(_.size)
+
+ /** The number of recursive invocation of underlying on a NamedType
+ * during a controlled operation.
+ */
+ private[core] var underlyingRecursions: Int = 0
+
+ /** The set of named types on which a currently active invocation
+ * of underlying during a controlled operation exists. */
+ private[core] val pendingUnderlying = new mutable.HashSet[Type]
+
+ /** A flag that some unsafe nonvariant instantiation was encountered
+ * in this run. Used as a shortcut to avoid scans of types in
+ * Typer.typedSelect.
+ */
+ private[dotty] var unsafeNonvariant: RunId = NoRunId
+
+ // Phases state
+
+ private[core] var phasesPlan: List[List[Phase]] = _
+
+ /** Phases by id */
+ private[core] var phases: Array[Phase] = _
+
+ /** Phases with consecutive Transforms grouped into a single phase, Empty array if squashing is disabled */
+ private[core] var squashedPhases: Array[Phase] = Array.empty[Phase]
+
+ /** Next denotation transformer id */
+ private[core] var nextDenotTransformerId: Array[Int] = _
+
+ private[core] var denotTransformers: Array[DenotTransformer] = _
+
+ // Printers state
+ /** Number of recursive invocations of a show method on current stack */
+ private[dotc] var toTextRecursions = 0
+
+ // Reporters state
+ private[dotc] var indent = 0
+
+ protected[dotc] val indentTab = " "
+
+ def reset() = {
+ for ((_, set) <- uniqueSets) set.clear()
+ for (i <- 0 until classOfId.length) classOfId(i) = null
+ superIdOfClass.clear()
+ lastSuperId = -1
+ }
+
+ // Test that access is single threaded
+
+ /** The thread on which `checkSingleThreaded` was invoked last */
+ @sharable private var thread: Thread = null
+
+ /** Check that we are on the same thread as before */
+ def checkSingleThreaded() =
+ if (thread == null) thread = Thread.currentThread()
+ else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase")
+ }
+
+ object Context {
+
+ /** Implicit conversion that injects all printer operations into a context */
+ implicit def toPrinter(ctx: Context): Printer = ctx.printer
+
+ /** Implicit conversion that injects all ContextBase members into a context */
+ implicit def toBase(ctx: Context): ContextBase = ctx.base
+
+ // @sharable val theBase = new ContextBase // !!! DEBUG, so that we can use a minimal context for reporting even in code that normally cannot access a context
+ }
+
+ /** Info that changes on each compiler run */
+ class RunInfo(initctx: Context) extends ImplicitRunInfo with ConstraintRunInfo {
+ implicit val ctx: Context = initctx
+ }
+
+ class GADTMap(initBounds: SimpleMap[Symbol, TypeBounds]) {
+ private var myBounds = initBounds
+ def setBounds(sym: Symbol, b: TypeBounds): Unit =
+ myBounds = myBounds.updated(sym, b)
+ def bounds = myBounds
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala
new file mode 100644
index 000000000..a105741f5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala
@@ -0,0 +1,185 @@
+package dotty.tools.dotc
+package core
+
+import annotation.tailrec
+import Symbols._
+import Contexts._, Names._, Phases._, printing.Texts._, printing.Printer, printing.Showable
+import util.Positions.Position, util.SourcePosition
+import collection.mutable.ListBuffer
+import dotty.tools.dotc.transform.TreeTransforms._
+import ast.tpd._
+import scala.language.implicitConversions
+import printing.Formatting._
+
+/** This object provides useful implicit decorators for types defined elsewhere */
+object Decorators {
+
+ /** Turns Strings into PreNames, adding toType/TermName methods */
+ implicit class StringDecorator(val s: String) extends AnyVal with PreName {
+ def toTypeName: TypeName = typeName(s)
+ def toTermName: TermName = termName(s)
+ def toText(printer: Printer): Text = Str(s)
+ }
+
+ /** Implements a findSymbol method on iterators of Symbols that
+ * works like find but avoids Option, replacing None with NoSymbol.
+ */
+ implicit class SymbolIteratorDecorator(val it: Iterator[Symbol]) extends AnyVal {
+ final def findSymbol(p: Symbol => Boolean): Symbol = {
+ while (it.hasNext) {
+ val sym = it.next
+ if (p(sym)) return sym
+ }
+ NoSymbol
+ }
+ }
+
+ final val MaxFilterRecursions = 1000
+
+ /** Implements filterConserve, zipWithConserve methods
+ * on lists that avoid duplication of list nodes where feasible.
+ */
+ implicit class ListDecorator[T](val xs: List[T]) extends AnyVal {
+
+ final def mapconserve[U](f: T => U): List[U] = {
+ @tailrec
+ def loop(mapped: ListBuffer[U], unchanged: List[U], pending: List[T]): List[U] =
+ if (pending.isEmpty) {
+ if (mapped eq null) unchanged
+ else mapped.prependToList(unchanged)
+ } else {
+ val head0 = pending.head
+ val head1 = f(head0)
+
+ if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef])
+ loop(mapped, unchanged, pending.tail)
+ else {
+ val b = if (mapped eq null) new ListBuffer[U] else mapped
+ var xc = unchanged
+ while (xc ne pending) {
+ b += xc.head
+ xc = xc.tail
+ }
+ b += head1
+ val tail0 = pending.tail
+ loop(b, tail0.asInstanceOf[List[U]], tail0)
+ }
+ }
+ loop(null, xs.asInstanceOf[List[U]], xs)
+ }
+
+ /** Like `xs filter p` but returns list `xs` itself - instead of a copy -
+ * if `p` is true for all elements and `xs` is not longer
+ * than `MaxFilterRecursions`.
+ */
+ def filterConserve(p: T => Boolean): List[T] = {
+ def loop(xs: List[T], nrec: Int): List[T] = xs match {
+ case Nil => xs
+ case x :: xs1 =>
+ if (nrec < MaxFilterRecursions) {
+ val ys1 = loop(xs1, nrec + 1)
+ if (p(x))
+ if (ys1 eq xs1) xs else x :: ys1
+ else
+ ys1
+ } else xs filter p
+ }
+ loop(xs, 0)
+ }
+
+ /** Like `(xs, ys).zipped.map(f)`, but returns list `xs` itself
+ * - instead of a copy - if function `f` maps all elements of
+ * `xs` to themselves. Also, it is required that `ys` is at least
+ * as long as `xs`.
+ */
+ def zipWithConserve[U](ys: List[U])(f: (T, U) => T): List[T] =
+ if (xs.isEmpty) xs
+ else {
+ val x1 = f(xs.head, ys.head)
+ val xs1 = xs.tail.zipWithConserve(ys.tail)(f)
+ if ((x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) &&
+ (xs1 eq xs.tail)) xs
+ else x1 :: xs1
+ }
+
+ def foldRightBN[U](z: => U)(op: (T, => U) => U): U = xs match {
+ case Nil => z
+ case x :: xs1 => op(x, xs1.foldRightBN(z)(op))
+ }
+
+ final def hasSameLengthAs[U](ys: List[U]): Boolean = {
+ @tailrec def loop(xs: List[T], ys: List[U]): Boolean =
+ if (xs.isEmpty) ys.isEmpty
+ else ys.nonEmpty && loop(xs.tail, ys.tail)
+ loop(xs, ys)
+ }
+
+ /** Union on lists seen as sets */
+ def | (ys: List[T]): List[T] = xs ++ (ys filterNot (xs contains _))
+
+ /** Intersection on lists seen as sets */
+ def & (ys: List[T]): List[T] = xs filter (ys contains _)
+ }
+
+ implicit class ListOfListDecorator[T](val xss: List[List[T]]) extends AnyVal {
+ def nestedMap[U](f: T => U): List[List[U]] = xss map (_ map f)
+ def nestedMapconserve[U](f: T => U): List[List[U]] = xss mapconserve (_ mapconserve f)
+ }
+
+ implicit class TextToString(val text: Text) extends AnyVal {
+ def show(implicit ctx: Context) = text.mkString(ctx.settings.pageWidth.value)
+ }
+
+ /** Test whether a list of strings representing phases contains
+ * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the
+ * exact meaning of "contains" here.
+ */
+ implicit class PhaseListDecorator(val names: List[String]) extends AnyVal {
+ def containsPhase(phase: Phase): Boolean = phase match {
+ case phase: TreeTransformer => phase.miniPhases.exists(containsPhase)
+ case _ =>
+ names exists { name =>
+ name == "all" || {
+ val strippedName = name.stripSuffix("+")
+ val logNextPhase = name ne strippedName
+ phase.phaseName.startsWith(strippedName) ||
+ (logNextPhase && phase.prev.phaseName.startsWith(strippedName))
+ }
+ }
+ }
+ }
+
+ implicit def sourcePos(pos: Position)(implicit ctx: Context): SourcePosition = {
+ def recur(inlinedCalls: List[Tree], pos: Position): SourcePosition = inlinedCalls match {
+ case inlinedCall :: rest =>
+ sourceFile(inlinedCall).atPos(pos).withOuter(recur(rest, inlinedCall.pos))
+ case empty =>
+ ctx.source.atPos(pos)
+ }
+ recur(enclosingInlineds, pos)
+ }
+
+ implicit class StringInterpolators(val sc: StringContext) extends AnyVal {
+
+ /** General purpose string formatting */
+ def i(args: Any*)(implicit ctx: Context): String =
+ new StringFormatter(sc).assemble(args)
+
+ /** Formatting for error messages: Like `i` but suppress follow-on
+ * error messages after the first one if some of their arguments are "non-sensical".
+ */
+ def em(args: Any*)(implicit ctx: Context): String =
+ new ErrorMessageFormatter(sc).assemble(args)
+
+ /** Formatting with added explanations: Like `em`, but add explanations to
+ * give more info about type variables and to disambiguate where needed.
+ */
+ def ex(args: Any*)(implicit ctx: Context): String =
+ explained2(implicit ctx => em(args: _*))
+
+ /** Formatter that adds syntax highlighting to all interpolated values */
+ def hl(args: Any*)(implicit ctx: Context): String =
+ new SyntaxFormatter(sc).assemble(args).stripMargin
+ }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
new file mode 100644
index 000000000..4b090d9b1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -0,0 +1,807 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._, Denotations._, SymDenotations._, StdNames._, Names._
+import Flags._, Scopes._, Decorators._, NameOps._, util.Positions._, Periods._
+import unpickleScala2.Scala2Unpickler.ensureConstructor
+import scala.annotation.{ switch, meta }
+import scala.collection.{ mutable, immutable }
+import PartialFunction._
+import collection.mutable
+import scala.reflect.api.{ Universe => ApiUniverse }
+
+object Definitions {
+ val MaxTupleArity, MaxAbstractFunctionArity = 22
+ val MaxFunctionArity = 30
+ // Awaiting a definite solution that drops the limit altogether, 30 gives a safety
+ // margin over the previous 22, so that treecopiers in miniphases are allowed to
+ // temporarily create larger closures. This is needed in lambda lift where large closures
+ // are first formed by treecopiers before they are split apart into parameters and
+ // environment in the lambdalift transform itself.
+}
+
+/** A class defining symbols and types of standard definitions
+ *
+ * Note: There's a much nicer design possible once we have implicit functions.
+ * The idea is explored to some degree in branch wip-definitions (#929): Instead of a type
+ * and a separate symbol definition, we produce in one line an implicit function from
+ * Context to Symbol, and possibly also the corresponding type. This cuts down on all
+ * the duplication encountered here.
+ *
+ * wip-definitions tries to do the same with an implicit conversion from a SymbolPerRun
+ * type to a symbol type. The problem with that is universal equality. Comparisons will
+ * not trigger the conversion and will therefore likely return false results.
+ *
+ * So the branch is put on hold, until we have implicit functions, which will always
+ * automatically be dereferenced.
+ */
+class Definitions {
+ import Definitions._
+
+ private implicit var ctx: Context = _
+
+ private def newSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) =
+ ctx.newSymbol(owner, name, flags | Permanent, info)
+
+ private def newClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) =
+ ctx.newClassSymbol(owner, name, flags | Permanent, infoFn).entered
+
+ private def newCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope = newScope) =
+ ctx.newCompleteClassSymbol(owner, name, flags | Permanent, parents, decls).entered
+
+ private def newTopClassSymbol(name: TypeName, flags: FlagSet, parents: List[TypeRef]) =
+ completeClass(newCompleteClassSymbol(ScalaPackageClass, name, flags, parents))
+
+ private def newTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
+ scope.enter(newSymbol(cls, name, flags, TypeBounds.empty))
+
+ private def newTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
+ newTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope)
+
+ private def newSyntheticTypeParam(cls: ClassSymbol, scope: MutableScope, paramFlags: FlagSet, suffix: String = "T0") =
+ newTypeParam(cls, suffix.toTypeName.expandedName(cls), ExpandedName | paramFlags, scope)
+
+ // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only
+ // implemented in Dotty and not in Scala 2.
+ // See <http://docs.scala-lang.org/sips/pending/repeated-byname.html>.
+ private def specialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = {
+ val completer = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val cls = denot.asClass.classSymbol
+ val paramDecls = newScope
+ val typeParam = newSyntheticTypeParam(cls, paramDecls, paramFlags)
+ def instantiate(tpe: Type) =
+ if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef)
+ else tpe
+ val parents = parentConstrs.toList map instantiate
+ val parentRefs: List[TypeRef] = ctx.normalizeToClassRefs(parents, cls, paramDecls)
+ denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parentRefs, paramDecls)
+ }
+ }
+ newClassSymbol(ScalaPackageClass, name, EmptyFlags, completer)
+ }
+
+ private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol =
+ newSymbol(cls, name.encode, flags | Method, info).entered.asTerm
+
+ private def newAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = {
+ val sym = newSymbol(ScalaPackageClass, name, flags, TypeAlias(tpe))
+ ScalaPackageClass.currentPackageDecls.enter(sym)
+ sym
+ }
+
+ private def newPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int,
+ resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags) = {
+ val tparamNames = tpnme.syntheticTypeParamNames(typeParamCount)
+ val tparamBounds = tparamNames map (_ => TypeBounds.empty)
+ val ptype = PolyType(tparamNames)(_ => tparamBounds, resultTypeFn)
+ newMethod(cls, name, ptype, flags)
+ }
+
+ private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ newPolyMethod(cls, name, 1, resultTypeFn, flags)
+
+ private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ newPolyMethod(cls, name, 1, pt => MethodType(Nil, resultTypeFn(pt)), flags)
+
+ private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef] = {
+ val arr = new Array[TypeRef](arity + 1)
+ for (i <- countFrom to arity) arr(i) = ctx.requiredClassRef(name + i)
+ arr
+ }
+
+ private def completeClass(cls: ClassSymbol): ClassSymbol = {
+ ensureConstructor(cls, EmptyScope)
+ if (cls.linkedClass.exists) cls.linkedClass.info = NoType
+ cls
+ }
+
+ lazy val RootClass: ClassSymbol = ctx.newPackageSymbol(
+ NoSymbol, nme.ROOT, (root, rootcls) => ctx.rootLoader(root)).moduleClass.asClass
+ lazy val RootPackage: TermSymbol = ctx.newSymbol(
+ NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass))
+
+ lazy val EmptyPackageVal = ctx.newPackageSymbol(
+ RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.rootLoader(emptypkg)).entered
+ lazy val EmptyPackageClass = EmptyPackageVal.moduleClass.asClass
+
+ /** A package in which we can place all methods that are interpreted specially by the compiler */
+ lazy val OpsPackageVal = ctx.newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered
+ lazy val OpsPackageClass = OpsPackageVal.moduleClass.asClass
+
+ lazy val ScalaPackageVal = ctx.requiredPackage("scala")
+ lazy val ScalaMathPackageVal = ctx.requiredPackage("scala.math")
+ lazy val ScalaPackageClass = ScalaPackageVal.moduleClass.asClass
+ lazy val JavaPackageVal = ctx.requiredPackage("java")
+ lazy val JavaLangPackageVal = ctx.requiredPackage("java.lang")
+ // fundamental modules
+ lazy val SysPackage = ctx.requiredModule("scala.sys.package")
+ lazy val Sys_errorR = SysPackage.moduleClass.requiredMethodRef(nme.error)
+ def Sys_error(implicit ctx: Context) = Sys_errorR.symbol
+
+ /** The `scalaShadowing` package is used to safely modify classes and
+ * objects in scala so that they can be used from dotty. They will
+ * be visible as members of the `scala` package, replacing any objects
+ * or classes with the same name. But their binary artifacts are
+ * in `scalaShadowing` so they don't clash with the same-named `scala`
+ * members at runtime.
+ */
+ lazy val ScalaShadowingPackageVal = ctx.requiredPackage("scalaShadowing")
+ lazy val ScalaShadowingPackageClass = ScalaShadowingPackageVal.moduleClass.asClass
+
+ /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter)
+ * because after erasure the Any and AnyVal references get remapped to the Object methods
+ * which would result in a double binding assertion failure.
+ * Instead we do the following:
+ *
+ * - Have some methods exist only in Any, and remap them with the Erasure denotation
+ * transformer to be owned by Object.
+ * - Have other methods exist only in Object.
+ * To achieve this, we synthesize all Any and Object methods; Object methods no longer get
+ * loaded from a classfile.
+ *
+ * There's a remaining question about `getClass`. In Scala2.x `getClass` was handled by compiler magic.
+ * This is deemed too cumbersome for Dotty and therefore right now `getClass` gets no special treatment;
+ * it's just a method on `Any` which returns the raw type `java.lang.Class`. An alternative
+ * way to get better `getClass` typing would be to treat `getClass` as a method of a generic
+ * decorator which gets remapped in a later phase to Object#getClass. Then we could give it
+ * the right type without changing the typechecker:
+ *
+ * implicit class AnyGetClass[T](val x: T) extends AnyVal {
+ * def getClass: java.lang.Class[T] = ???
+ * }
+ */
+ lazy val AnyClass: ClassSymbol = completeClass(newCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil))
+ def AnyType = AnyClass.typeRef
+ lazy val AnyValClass: ClassSymbol = completeClass(newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyClass.typeRef)))
+ def AnyValType = AnyValClass.typeRef
+
+ lazy val Any_== = newMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final)
+ lazy val Any_!= = newMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final)
+ lazy val Any_equals = newMethod(AnyClass, nme.equals_, methOfAny(BooleanType))
+ lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
+ lazy val Any_toString = newMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
+ lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
+ lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final)
+ lazy val Any_isInstanceOf = newT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
+ lazy val Any_asInstanceOf = newT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final)
+
+ def AnyMethods = List(Any_==, Any_!=, Any_equals, Any_hashCode,
+ Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf)
+
+ lazy val ObjectClass: ClassSymbol = {
+ val cls = ctx.requiredClass("java.lang.Object")
+ assert(!cls.isCompleted, "race for completing java.lang.Object")
+ cls.info = ClassInfo(cls.owner.thisType, cls, AnyClass.typeRef :: Nil, newScope)
+
+ // The companion object doesn't really exist; setting its info to `NoType`
+ // is the general technique to indicate that. We need to set it before any
+ // attempt to load Object's classfile completes, which causes issue #1648.
+ val companion = JavaLangPackageVal.info.decl(nme.Object).symbol
+ companion.info = NoType // to indicate that it does not really exist
+
+ completeClass(cls)
+ }
+ def ObjectType = ObjectClass.typeRef
+
+ lazy val AnyRefAlias: TypeSymbol = newAliasType(tpnme.AnyRef, ObjectType)
+ def AnyRefType = AnyRefAlias.typeRef
+
+ lazy val Object_eq = newMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final)
+ lazy val Object_ne = newMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final)
+ lazy val Object_synchronized = newPolyMethod(ObjectClass, nme.synchronized_, 1,
+ pt => MethodType(List(PolyParam(pt, 0)), PolyParam(pt, 0)), Final)
+ lazy val Object_clone = newMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected)
+ lazy val Object_finalize = newMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected)
+ lazy val Object_notify = newMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType))
+ lazy val Object_notifyAll = newMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType))
+ lazy val Object_wait = newMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType))
+ lazy val Object_waitL = newMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType))
+ lazy val Object_waitLI = newMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType))
+
+ def ObjectMethods = List(Object_eq, Object_ne, Object_synchronized, Object_clone,
+ Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI)
+
+ /** Dummy method needed by elimByName */
+ lazy val dummyApply = newPolyMethod(
+ OpsPackageClass, nme.dummyApply, 1,
+ pt => MethodType(List(FunctionOf(Nil, PolyParam(pt, 0))), PolyParam(pt, 0)))
+
+ /** Method representing a throw */
+ lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
+ MethodType(List(ThrowableType), NothingType))
+
+ lazy val NothingClass: ClassSymbol = newCompleteClassSymbol(
+ ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyClass.typeRef))
+ def NothingType = NothingClass.typeRef
+ lazy val NullClass: ClassSymbol = newCompleteClassSymbol(
+ ScalaPackageClass, tpnme.Null, AbstractFinal, List(ObjectClass.typeRef))
+ def NullType = NullClass.typeRef
+
+ lazy val ScalaPredefModuleRef = ctx.requiredModuleRef("scala.Predef")
+ def ScalaPredefModule(implicit ctx: Context) = ScalaPredefModuleRef.symbol
+
+ lazy val Predef_conformsR = ScalaPredefModule.requiredMethodRef("$conforms")
+ def Predef_conforms(implicit ctx: Context) = Predef_conformsR.symbol
+ lazy val Predef_classOfR = ScalaPredefModule.requiredMethodRef("classOf")
+ def Predef_classOf(implicit ctx: Context) = Predef_classOfR.symbol
+
+ lazy val ScalaRuntimeModuleRef = ctx.requiredModuleRef("scala.runtime.ScalaRunTime")
+ def ScalaRuntimeModule(implicit ctx: Context) = ScalaRuntimeModuleRef.symbol
+ def ScalaRuntimeClass(implicit ctx: Context) = ScalaRuntimeModule.moduleClass.asClass
+
+ def runtimeMethodRef(name: PreName) = ScalaRuntimeModule.requiredMethodRef(name)
+ def ScalaRuntime_dropR(implicit ctx: Context) = runtimeMethodRef(nme.drop)
+ def ScalaRuntime_drop(implicit ctx: Context) = ScalaRuntime_dropR.symbol
+
+ lazy val BoxesRunTimeModuleRef = ctx.requiredModuleRef("scala.runtime.BoxesRunTime")
+ def BoxesRunTimeModule(implicit ctx: Context) = BoxesRunTimeModuleRef.symbol
+ def BoxesRunTimeClass(implicit ctx: Context) = BoxesRunTimeModule.moduleClass.asClass
+ lazy val ScalaStaticsModuleRef = ctx.requiredModuleRef("scala.runtime.Statics")
+ def ScalaStaticsModule(implicit ctx: Context) = ScalaStaticsModuleRef.symbol
+ def ScalaStaticsClass(implicit ctx: Context) = ScalaStaticsModule.moduleClass.asClass
+
+ def staticsMethodRef(name: PreName) = ScalaStaticsModule.requiredMethodRef(name)
+ def staticsMethod(name: PreName) = ScalaStaticsModule.requiredMethod(name)
+
+ lazy val DottyPredefModuleRef = ctx.requiredModuleRef("dotty.DottyPredef")
+ def DottyPredefModule(implicit ctx: Context) = DottyPredefModuleRef.symbol
+
+ def Predef_eqAny(implicit ctx: Context) = DottyPredefModule.requiredMethod(nme.eqAny)
+
+ lazy val DottyArraysModuleRef = ctx.requiredModuleRef("dotty.runtime.Arrays")
+ def DottyArraysModule(implicit ctx: Context) = DottyArraysModuleRef.symbol
+ def newGenericArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newGenericArray")
+ def newArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newArray")
+
+ lazy val NilModuleRef = ctx.requiredModuleRef("scala.collection.immutable.Nil")
+ def NilModule(implicit ctx: Context) = NilModuleRef.symbol
+
+ lazy val SingletonClass: ClassSymbol =
+ // needed as a synthetic class because Scala 2.x refers to it in classfiles
+ // but does not define it as an explicit class.
+ newCompleteClassSymbol(
+ ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final,
+ List(AnyClass.typeRef), EmptyScope)
+
+ lazy val SeqType: TypeRef = ctx.requiredClassRef("scala.collection.Seq")
+ def SeqClass(implicit ctx: Context) = SeqType.symbol.asClass
+
+ lazy val Seq_applyR = SeqClass.requiredMethodRef(nme.apply)
+ def Seq_apply(implicit ctx: Context) = Seq_applyR.symbol
+ lazy val Seq_headR = SeqClass.requiredMethodRef(nme.head)
+ def Seq_head(implicit ctx: Context) = Seq_headR.symbol
+
+ lazy val ArrayType: TypeRef = ctx.requiredClassRef("scala.Array")
+ def ArrayClass(implicit ctx: Context) = ArrayType.symbol.asClass
+ lazy val Array_applyR = ArrayClass.requiredMethodRef(nme.apply)
+ def Array_apply(implicit ctx: Context) = Array_applyR.symbol
+ lazy val Array_updateR = ArrayClass.requiredMethodRef(nme.update)
+ def Array_update(implicit ctx: Context) = Array_updateR.symbol
+ lazy val Array_lengthR = ArrayClass.requiredMethodRef(nme.length)
+ def Array_length(implicit ctx: Context) = Array_lengthR.symbol
+ lazy val Array_cloneR = ArrayClass.requiredMethodRef(nme.clone_)
+ def Array_clone(implicit ctx: Context) = Array_cloneR.symbol
+ lazy val ArrayConstructorR = ArrayClass.requiredMethodRef(nme.CONSTRUCTOR)
+ def ArrayConstructor(implicit ctx: Context) = ArrayConstructorR.symbol
+ lazy val ArrayModuleType = ctx.requiredModuleRef("scala.Array")
+ def ArrayModule(implicit ctx: Context) = ArrayModuleType.symbol.moduleClass.asClass
+
+
+ lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", BoxedUnitType, java.lang.Void.TYPE, UnitEnc)
+ def UnitClass(implicit ctx: Context) = UnitType.symbol.asClass
+ lazy val BooleanType = valueTypeRef("scala.Boolean", BoxedBooleanType, java.lang.Boolean.TYPE, BooleanEnc)
+ def BooleanClass(implicit ctx: Context) = BooleanType.symbol.asClass
+ lazy val Boolean_notR = BooleanClass.requiredMethodRef(nme.UNARY_!)
+ def Boolean_! = Boolean_notR.symbol
+ lazy val Boolean_andR = BooleanClass.requiredMethodRef(nme.ZAND) // ### harmonize required... calls
+ def Boolean_&& = Boolean_andR.symbol
+ lazy val Boolean_orR = BooleanClass.requiredMethodRef(nme.ZOR)
+ def Boolean_|| = Boolean_orR.symbol
+
+ lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", BoxedByteType, java.lang.Byte.TYPE, ByteEnc)
+ def ByteClass(implicit ctx: Context) = ByteType.symbol.asClass
+ lazy val ShortType: TypeRef = valueTypeRef("scala.Short", BoxedShortType, java.lang.Short.TYPE, ShortEnc)
+ def ShortClass(implicit ctx: Context) = ShortType.symbol.asClass
+ lazy val CharType: TypeRef = valueTypeRef("scala.Char", BoxedCharType, java.lang.Character.TYPE, CharEnc)
+ def CharClass(implicit ctx: Context) = CharType.symbol.asClass
+ lazy val IntType: TypeRef = valueTypeRef("scala.Int", BoxedIntType, java.lang.Integer.TYPE, IntEnc)
+ def IntClass(implicit ctx: Context) = IntType.symbol.asClass
+ lazy val Int_minusR = IntClass.requiredMethodRef(nme.MINUS, List(IntType))
+ def Int_- = Int_minusR.symbol
+ lazy val Int_plusR = IntClass.requiredMethodRef(nme.PLUS, List(IntType))
+ def Int_+ = Int_plusR.symbol
+ lazy val Int_divR = IntClass.requiredMethodRef(nme.DIV, List(IntType))
+ def Int_/ = Int_divR.symbol
+ lazy val Int_mulR = IntClass.requiredMethodRef(nme.MUL, List(IntType))
+ def Int_* = Int_mulR.symbol
+ lazy val Int_eqR = IntClass.requiredMethodRef(nme.EQ, List(IntType))
+ def Int_== = Int_eqR.symbol
+ lazy val Int_geR = IntClass.requiredMethodRef(nme.GE, List(IntType))
+ def Int_>= = Int_geR.symbol
+ lazy val Int_leR = IntClass.requiredMethodRef(nme.LE, List(IntType))
+ def Int_<= = Int_leR.symbol
+ lazy val LongType: TypeRef = valueTypeRef("scala.Long", BoxedLongType, java.lang.Long.TYPE, LongEnc)
+ def LongClass(implicit ctx: Context) = LongType.symbol.asClass
+ lazy val Long_XOR_Long = LongType.member(nme.XOR).requiredSymbol(
+ x => (x is Method) && (x.info.firstParamTypes.head isRef defn.LongClass)
+ )
+ lazy val Long_LSR_Int = LongType.member(nme.LSR).requiredSymbol(
+ x => (x is Method) && (x.info.firstParamTypes.head isRef defn.IntClass)
+ )
+ lazy val FloatType: TypeRef = valueTypeRef("scala.Float", BoxedFloatType, java.lang.Float.TYPE, FloatEnc)
+ def FloatClass(implicit ctx: Context) = FloatType.symbol.asClass
+ lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", BoxedDoubleType, java.lang.Double.TYPE, DoubleEnc)
+ def DoubleClass(implicit ctx: Context) = DoubleType.symbol.asClass
+
+ lazy val BoxedUnitType: TypeRef = ctx.requiredClassRef("scala.runtime.BoxedUnit")
+ def BoxedUnitClass(implicit ctx: Context) = BoxedUnitType.symbol.asClass
+
+ def BoxedUnit_UNIT(implicit ctx: Context) = BoxedUnitClass.linkedClass.requiredValue("UNIT")
+
+ lazy val BoxedBooleanType: TypeRef = ctx.requiredClassRef("java.lang.Boolean")
+ def BoxedBooleanClass(implicit ctx: Context) = BoxedBooleanType.symbol.asClass
+ lazy val BoxedByteType: TypeRef = ctx.requiredClassRef("java.lang.Byte")
+ def BoxedByteClass(implicit ctx: Context) = BoxedByteType.symbol.asClass
+ lazy val BoxedShortType: TypeRef = ctx.requiredClassRef("java.lang.Short")
+ def BoxedShortClass(implicit ctx: Context) = BoxedShortType.symbol.asClass
+ lazy val BoxedCharType: TypeRef = ctx.requiredClassRef("java.lang.Character")
+ def BoxedCharClass(implicit ctx: Context) = BoxedCharType.symbol.asClass
+ lazy val BoxedIntType: TypeRef = ctx.requiredClassRef("java.lang.Integer")
+ def BoxedIntClass(implicit ctx: Context) = BoxedIntType.symbol.asClass
+ lazy val BoxedLongType: TypeRef = ctx.requiredClassRef("java.lang.Long")
+ def BoxedLongClass(implicit ctx: Context) = BoxedLongType.symbol.asClass
+ lazy val BoxedFloatType: TypeRef = ctx.requiredClassRef("java.lang.Float")
+ def BoxedFloatClass(implicit ctx: Context) = BoxedFloatType.symbol.asClass
+ lazy val BoxedDoubleType: TypeRef = ctx.requiredClassRef("java.lang.Double")
+ def BoxedDoubleClass(implicit ctx: Context) = BoxedDoubleType.symbol.asClass
+
+ lazy val BoxedBooleanModule = ctx.requiredModule("java.lang.Boolean")
+ lazy val BoxedByteModule = ctx.requiredModule("java.lang.Byte")
+ lazy val BoxedShortModule = ctx.requiredModule("java.lang.Short")
+ lazy val BoxedCharModule = ctx.requiredModule("java.lang.Character")
+ lazy val BoxedIntModule = ctx.requiredModule("java.lang.Integer")
+ lazy val BoxedLongModule = ctx.requiredModule("java.lang.Long")
+ lazy val BoxedFloatModule = ctx.requiredModule("java.lang.Float")
+ lazy val BoxedDoubleModule = ctx.requiredModule("java.lang.Double")
+ lazy val BoxedUnitModule = ctx.requiredModule("java.lang.Void")
+
+ lazy val ByNameParamClass2x = specialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType))
+ lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN, EmptyFlags, Seq(AnyType))
+
+ lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType))
+
+ // fundamental classes
+ lazy val StringClass = ctx.requiredClass("java.lang.String")
+ def StringType: Type = StringClass.typeRef
+ lazy val StringModule = StringClass.linkedClass
+
+ lazy val String_+ = newMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final)
+ lazy val String_valueOf_Object = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match {
+ case List(pt) => (pt isRef AnyClass) || (pt isRef ObjectClass)
+ case _ => false
+ }).symbol
+
+ lazy val JavaCloneableClass = ctx.requiredClass("java.lang.Cloneable")
+ lazy val NullPointerExceptionClass = ctx.requiredClass("java.lang.NullPointerException")
+ lazy val ClassClass = ctx.requiredClass("java.lang.Class")
+ lazy val BoxedNumberClass = ctx.requiredClass("java.lang.Number")
+ lazy val ThrowableClass = ctx.requiredClass("java.lang.Throwable")
+ lazy val ClassCastExceptionClass = ctx.requiredClass("java.lang.ClassCastException")
+ lazy val JavaSerializableClass = ctx.requiredClass("java.lang.Serializable")
+ lazy val ComparableClass = ctx.requiredClass("java.lang.Comparable")
+
+ // in scalac modified to have Any as parent
+
+ lazy val SerializableType: TypeRef = ctx.requiredClassRef("scala.Serializable")
+ def SerializableClass(implicit ctx: Context) = SerializableType.symbol.asClass
+ lazy val StringBuilderType: TypeRef = ctx.requiredClassRef("scala.collection.mutable.StringBuilder")
+ def StringBuilderClass(implicit ctx: Context) = StringBuilderType.symbol.asClass
+ lazy val MatchErrorType: TypeRef = ctx.requiredClassRef("scala.MatchError")
+ def MatchErrorClass(implicit ctx: Context) = MatchErrorType.symbol.asClass
+
+ lazy val StringAddType: TypeRef = ctx.requiredClassRef("scala.runtime.StringAdd")
+ def StringAddClass(implicit ctx: Context) = StringAddType.symbol.asClass
+
+ lazy val StringAdd_plusR = StringAddClass.requiredMethodRef(nme.raw.PLUS)
+ def StringAdd_+(implicit ctx: Context) = StringAdd_plusR.symbol
+
+ lazy val PartialFunctionType: TypeRef = ctx.requiredClassRef("scala.PartialFunction")
+ def PartialFunctionClass(implicit ctx: Context) = PartialFunctionType.symbol.asClass
+ lazy val AbstractPartialFunctionType: TypeRef = ctx.requiredClassRef("scala.runtime.AbstractPartialFunction")
+ def AbstractPartialFunctionClass(implicit ctx: Context) = AbstractPartialFunctionType.symbol.asClass
+ lazy val SymbolType: TypeRef = ctx.requiredClassRef("scala.Symbol")
+ def SymbolClass(implicit ctx: Context) = SymbolType.symbol.asClass
+ lazy val DynamicType: TypeRef = ctx.requiredClassRef("scala.Dynamic")
+ def DynamicClass(implicit ctx: Context) = DynamicType.symbol.asClass
+ lazy val OptionType: TypeRef = ctx.requiredClassRef("scala.Option")
+ def OptionClass(implicit ctx: Context) = OptionType.symbol.asClass
+ lazy val ProductType: TypeRef = ctx.requiredClassRef("scala.Product")
+ def ProductClass(implicit ctx: Context) = ProductType.symbol.asClass
+ lazy val Product_canEqualR = ProductClass.requiredMethodRef(nme.canEqual_)
+ def Product_canEqual(implicit ctx: Context) = Product_canEqualR.symbol
+ lazy val Product_productArityR = ProductClass.requiredMethodRef(nme.productArity)
+ def Product_productArity(implicit ctx: Context) = Product_productArityR.symbol
+ lazy val Product_productPrefixR = ProductClass.requiredMethodRef(nme.productPrefix)
+ def Product_productPrefix(implicit ctx: Context) = Product_productPrefixR.symbol
+ lazy val LanguageModuleRef = ctx.requiredModule("scala.language")
+ def LanguageModuleClass(implicit ctx: Context) = LanguageModuleRef.symbol.moduleClass.asClass
+ lazy val NonLocalReturnControlType: TypeRef = ctx.requiredClassRef("scala.runtime.NonLocalReturnControl")
+
+ lazy val ClassTagType = ctx.requiredClassRef("scala.reflect.ClassTag")
+ def ClassTagClass(implicit ctx: Context) = ClassTagType.symbol.asClass
+ def ClassTagModule(implicit ctx: Context) = ClassTagClass.companionModule
+
+ lazy val EqType = ctx.requiredClassRef("scala.Eq")
+ def EqClass(implicit ctx: Context) = EqType.symbol.asClass
+
+ // Annotation base classes
+ lazy val AnnotationType = ctx.requiredClassRef("scala.annotation.Annotation")
+ def AnnotationClass(implicit ctx: Context) = AnnotationType.symbol.asClass
+ lazy val ClassfileAnnotationType = ctx.requiredClassRef("scala.annotation.ClassfileAnnotation")
+ def ClassfileAnnotationClass(implicit ctx: Context) = ClassfileAnnotationType.symbol.asClass
+ lazy val StaticAnnotationType = ctx.requiredClassRef("scala.annotation.StaticAnnotation")
+ def StaticAnnotationClass(implicit ctx: Context) = StaticAnnotationType.symbol.asClass
+
+ // Annotation classes
+ lazy val AliasAnnotType = ctx.requiredClassRef("scala.annotation.internal.Alias")
+ def AliasAnnot(implicit ctx: Context) = AliasAnnotType.symbol.asClass
+ lazy val AnnotationDefaultAnnotType = ctx.requiredClassRef("scala.annotation.internal.AnnotationDefault")
+ def AnnotationDefaultAnnot(implicit ctx: Context) = AnnotationDefaultAnnotType.symbol.asClass
+ lazy val BodyAnnotType = ctx.requiredClassRef("scala.annotation.internal.Body")
+ def BodyAnnot(implicit ctx: Context) = BodyAnnotType.symbol.asClass
+ lazy val ChildAnnotType = ctx.requiredClassRef("scala.annotation.internal.Child")
+ def ChildAnnot(implicit ctx: Context) = ChildAnnotType.symbol.asClass
+ lazy val CovariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.CovariantBetween")
+ def CovariantBetweenAnnot(implicit ctx: Context) = CovariantBetweenAnnotType.symbol.asClass
+ lazy val ContravariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.ContravariantBetween")
+ def ContravariantBetweenAnnot(implicit ctx: Context) = ContravariantBetweenAnnotType.symbol.asClass
+ lazy val DeprecatedAnnotType = ctx.requiredClassRef("scala.deprecated")
+ def DeprecatedAnnot(implicit ctx: Context) = DeprecatedAnnotType.symbol.asClass
+ lazy val ImplicitNotFoundAnnotType = ctx.requiredClassRef("scala.annotation.implicitNotFound")
+ def ImplicitNotFoundAnnot(implicit ctx: Context) = ImplicitNotFoundAnnotType.symbol.asClass
+ lazy val InlineAnnotType = ctx.requiredClassRef("scala.inline")
+ def InlineAnnot(implicit ctx: Context) = InlineAnnotType.symbol.asClass
+ lazy val InlineParamAnnotType = ctx.requiredClassRef("scala.annotation.internal.InlineParam")
+ def InlineParamAnnot(implicit ctx: Context) = InlineParamAnnotType.symbol.asClass
+ lazy val InvariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.InvariantBetween")
+ def InvariantBetweenAnnot(implicit ctx: Context) = InvariantBetweenAnnotType.symbol.asClass
+ lazy val MigrationAnnotType = ctx.requiredClassRef("scala.annotation.migration")
+ def MigrationAnnot(implicit ctx: Context) = MigrationAnnotType.symbol.asClass
+ lazy val NativeAnnotType = ctx.requiredClassRef("scala.native")
+ def NativeAnnot(implicit ctx: Context) = NativeAnnotType.symbol.asClass
+ lazy val RemoteAnnotType = ctx.requiredClassRef("scala.remote")
+ def RemoteAnnot(implicit ctx: Context) = RemoteAnnotType.symbol.asClass
+ lazy val RepeatedAnnotType = ctx.requiredClassRef("scala.annotation.internal.Repeated")
+ def RepeatedAnnot(implicit ctx: Context) = RepeatedAnnotType.symbol.asClass
+ lazy val SourceFileAnnotType = ctx.requiredClassRef("scala.annotation.internal.SourceFile")
+ def SourceFileAnnot(implicit ctx: Context) = SourceFileAnnotType.symbol.asClass
+ lazy val ScalaSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaSignature")
+ def ScalaSignatureAnnot(implicit ctx: Context) = ScalaSignatureAnnotType.symbol.asClass
+ lazy val ScalaLongSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaLongSignature")
+ def ScalaLongSignatureAnnot(implicit ctx: Context) = ScalaLongSignatureAnnotType.symbol.asClass
+ lazy val ScalaStrictFPAnnotType = ctx.requiredClassRef("scala.annotation.strictfp")
+ def ScalaStrictFPAnnot(implicit ctx: Context) = ScalaStrictFPAnnotType.symbol.asClass
+ lazy val ScalaStaticAnnotType = ctx.requiredClassRef("scala.annotation.static")
+ def ScalaStaticAnnot(implicit ctx: Context) = ScalaStaticAnnotType.symbol.asClass
+ lazy val SerialVersionUIDAnnotType = ctx.requiredClassRef("scala.SerialVersionUID")
+ def SerialVersionUIDAnnot(implicit ctx: Context) = SerialVersionUIDAnnotType.symbol.asClass
+ lazy val TASTYSignatureAnnotType = ctx.requiredClassRef("scala.annotation.internal.TASTYSignature")
+ def TASTYSignatureAnnot(implicit ctx: Context) = TASTYSignatureAnnotType.symbol.asClass
+ lazy val TASTYLongSignatureAnnotType = ctx.requiredClassRef("scala.annotation.internal.TASTYLongSignature")
+ def TASTYLongSignatureAnnot(implicit ctx: Context) = TASTYLongSignatureAnnotType.symbol.asClass
+ lazy val TailrecAnnotType = ctx.requiredClassRef("scala.annotation.tailrec")
+ def TailrecAnnot(implicit ctx: Context) = TailrecAnnotType.symbol.asClass
+ lazy val SwitchAnnotType = ctx.requiredClassRef("scala.annotation.switch")
+ def SwitchAnnot(implicit ctx: Context) = SwitchAnnotType.symbol.asClass
+ lazy val ThrowsAnnotType = ctx.requiredClassRef("scala.throws")
+ def ThrowsAnnot(implicit ctx: Context) = ThrowsAnnotType.symbol.asClass
+ lazy val TransientAnnotType = ctx.requiredClassRef("scala.transient")
+ def TransientAnnot(implicit ctx: Context) = TransientAnnotType.symbol.asClass
+ lazy val UncheckedAnnotType = ctx.requiredClassRef("scala.unchecked")
+ def UncheckedAnnot(implicit ctx: Context) = UncheckedAnnotType.symbol.asClass
+ lazy val UncheckedStableAnnotType = ctx.requiredClassRef("scala.annotation.unchecked.uncheckedStable")
+ def UncheckedStableAnnot(implicit ctx: Context) = UncheckedStableAnnotType.symbol.asClass
+ lazy val UncheckedVarianceAnnotType = ctx.requiredClassRef("scala.annotation.unchecked.uncheckedVariance")
+ def UncheckedVarianceAnnot(implicit ctx: Context) = UncheckedVarianceAnnotType.symbol.asClass
+ lazy val UnsafeNonvariantAnnotType = ctx.requiredClassRef("scala.annotation.internal.UnsafeNonvariant")
+ def UnsafeNonvariantAnnot(implicit ctx: Context) = UnsafeNonvariantAnnotType.symbol.asClass
+ lazy val VolatileAnnotType = ctx.requiredClassRef("scala.volatile")
+ def VolatileAnnot(implicit ctx: Context) = VolatileAnnotType.symbol.asClass
+ lazy val FieldMetaAnnotType = ctx.requiredClassRef("scala.annotation.meta.field")
+ def FieldMetaAnnot(implicit ctx: Context) = FieldMetaAnnotType.symbol.asClass
+ lazy val GetterMetaAnnotType = ctx.requiredClassRef("scala.annotation.meta.getter")
+ def GetterMetaAnnot(implicit ctx: Context) = GetterMetaAnnotType.symbol.asClass
+ lazy val SetterMetaAnnotType = ctx.requiredClassRef("scala.annotation.meta.setter")
+ def SetterMetaAnnot(implicit ctx: Context) = SetterMetaAnnotType.symbol.asClass
+
+ // convenient one-parameter method types
+ def methOfAny(tp: Type) = MethodType(List(AnyType), tp)
+ def methOfAnyVal(tp: Type) = MethodType(List(AnyValType), tp)
+ def methOfAnyRef(tp: Type) = MethodType(List(ObjectType), tp)
+
+ // Derived types
+
+ def RepeatedParamType = RepeatedParamClass.typeRef
+ def ThrowableType = ThrowableClass.typeRef
+
+ def ClassType(arg: Type)(implicit ctx: Context) = {
+ val ctype = ClassClass.typeRef
+ if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg)
+ }
+
+  /** The enumeration type, given a value of the enumeration */
+ def EnumType(sym: Symbol)(implicit ctx: Context) =
+ // given (in java): "class A { enum E { VAL1 } }"
+ // - sym: the symbol of the actual enumeration value (VAL1)
+ // - .owner: the ModuleClassSymbol of the enumeration (object E)
+ // - .linkedClass: the ClassSymbol of the enumeration (class E)
+ sym.owner.linkedClass.typeRef
+
+ object FunctionOf {
+ def apply(args: List[Type], resultType: Type)(implicit ctx: Context) =
+ FunctionType(args.length).appliedTo(args ::: resultType :: Nil)
+ def unapply(ft: Type)(implicit ctx: Context)/*: Option[(List[Type], Type)]*/ = {
+ // -language:keepUnions difference: unapply needs result type because inferred type
+ // is Some[(List[Type], Type)] | None, which is not a legal unapply type.
+ val tsym = ft.typeSymbol
+ lazy val targs = ft.argInfos
+ val numArgs = targs.length - 1
+ if (numArgs >= 0 && numArgs <= MaxFunctionArity &&
+ (FunctionType(numArgs).symbol == tsym)) Some(targs.init, targs.last)
+ else None
+ }
+ }
+
+ object ArrayOf {
+ def apply(elem: Type)(implicit ctx: Context) =
+ if (ctx.erasedTypes) JavaArrayType(elem)
+ else ArrayType.appliedTo(elem :: Nil)
+ def unapply(tp: Type)(implicit ctx: Context): Option[Type] = tp.dealias match {
+ case at: RefinedType if (at isRef ArrayType.symbol) && at.argInfos.length == 1 => Some(at.argInfos.head)
+ case _ => None
+ }
+ }
+
+ /** An extractor for multi-dimensional arrays.
+ * Note that this will also extract the high bound if an
+ * element type is a wildcard. E.g.
+ *
+ * Array[_ <: Array[_ <: Number]]
+ *
+ * would match
+ *
+ * MultiArrayOf(<Number>, 2)
+ */
+ object MultiArrayOf {
+ def apply(elem: Type, ndims: Int)(implicit ctx: Context): Type =
+ if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1))
+ def unapply(tp: Type)(implicit ctx: Context): Option[(Type, Int)] = tp match {
+ case ArrayOf(elemtp) =>
+ def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match {
+ case TypeBounds(lo, hi) => recur(hi)
+ case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1)
+ case _ => Some(elemtp, 1)
+ }
+ recur(elemtp)
+ case _ =>
+ None
+ }
+ }
+
+ // ----- Symbol sets ---------------------------------------------------
+
+ lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxAbstractFunctionArity, 0)
+ val AbstractFunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => AbstractFunctionType.map(_.symbol.asClass))
+ def AbstractFunctionClass(n: Int)(implicit ctx: Context) = AbstractFunctionClassPerRun()(ctx)(n)
+ lazy val FunctionType = mkArityArray("scala.Function", MaxFunctionArity, 0)
+ def FunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => FunctionType.map(_.symbol.asClass))
+ def FunctionClass(n: Int)(implicit ctx: Context) = FunctionClassPerRun()(ctx)(n)
+ lazy val Function0_applyR = FunctionType(0).symbol.requiredMethodRef(nme.apply)
+ def Function0_apply(implicit ctx: Context) = Function0_applyR.symbol
+
+ lazy val TupleType = mkArityArray("scala.Tuple", MaxTupleArity, 2)
+ lazy val ProductNType = mkArityArray("scala.Product", MaxTupleArity, 0)
+
+ private lazy val FunctionTypes: Set[TypeRef] = FunctionType.toSet
+ private lazy val TupleTypes: Set[TypeRef] = TupleType.toSet
+ private lazy val ProductTypes: Set[TypeRef] = ProductNType.toSet
+
+ /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */
+ def scalaClassName(cls: Symbol)(implicit ctx: Context): TypeName =
+ if (cls.isClass && cls.owner == ScalaPackageClass) cls.asClass.name else EmptyTypeName
+
+ /** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */
+ def scalaClassName(ref: Type)(implicit ctx: Context): TypeName = scalaClassName(ref.classSymbol)
+
+ private def isVarArityClass(cls: Symbol, prefix: Name) = {
+ val name = scalaClassName(cls)
+ name.startsWith(prefix) && name.drop(prefix.length).forall(_.isDigit)
+ }
+
+ def isBottomClass(cls: Symbol) =
+ cls == NothingClass || cls == NullClass
+ def isBottomType(tp: Type) =
+ tp.derivesFrom(NothingClass) || tp.derivesFrom(NullClass)
+
+ def isFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.Function)
+ def isAbstractFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.AbstractFunction)
+ def isTupleClass(cls: Symbol) = isVarArityClass(cls, tpnme.Tuple)
+ def isProductClass(cls: Symbol) = isVarArityClass(cls, tpnme.Product)
+
+ val StaticRootImportFns = List[() => TermRef](
+ () => JavaLangPackageVal.termRef,
+ () => ScalaPackageVal.termRef
+ )
+
+ val PredefImportFns = List[() => TermRef](
+ () => ScalaPredefModuleRef,
+ () => DottyPredefModuleRef
+ )
+
+ lazy val RootImportFns =
+ if (ctx.settings.YnoImports.value) List.empty[() => TermRef]
+ else if (ctx.settings.YnoPredef.value) StaticRootImportFns
+ else StaticRootImportFns ++ PredefImportFns
+
+ lazy val RootImportTypes = RootImportFns.map(_())
+
+ /** Modules whose members are in the default namespace and their module classes */
+ lazy val UnqualifiedOwnerTypes: Set[NamedType] =
+ RootImportTypes.toSet[NamedType] ++ RootImportTypes.map(_.symbol.moduleClass.typeRef)
+
+ lazy val PhantomClasses = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
+
+ def isPolymorphicAfterErasure(sym: Symbol) =
+ (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf)
+
+ def isTupleType(tp: Type)(implicit ctx: Context) = {
+ val arity = tp.dealias.argInfos.length
+ arity <= MaxTupleArity && TupleType(arity) != null && (tp isRef TupleType(arity).symbol)
+ }
+
+ def tupleType(elems: List[Type]) = {
+ TupleType(elems.size).appliedTo(elems)
+ }
+
+ def isProductSubType(tp: Type)(implicit ctx: Context) =
+ (tp derivesFrom ProductType.symbol) && tp.baseClasses.exists(isProductClass)
+
+ def isFunctionType(tp: Type)(implicit ctx: Context) = {
+ val arity = functionArity(tp)
+ 0 <= arity && arity <= MaxFunctionArity && (tp isRef FunctionType(arity).symbol)
+ }
+
+ def functionArity(tp: Type)(implicit ctx: Context) = tp.dealias.argInfos.length - 1
+
+ // ----- primitive value class machinery ------------------------------------------
+
+ /** This class would also be obviated by the implicit function type design */
+ class PerRun[T](generate: Context => T) {
+ private var current: RunId = NoRunId
+ private var cached: T = _
+ def apply()(implicit ctx: Context): T = {
+ if (current != ctx.runId) {
+ cached = generate(ctx)
+ current = ctx.runId
+ }
+ cached
+ }
+ }
+
+ lazy val ScalaNumericValueTypeList = List(
+ ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType)
+
+ private lazy val ScalaNumericValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet
+ private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes + UnitType + BooleanType
+ private lazy val ScalaBoxedTypes = ScalaValueTypes map (t => boxedTypes(t.name))
+
+ val ScalaNumericValueClasses = new PerRun[collection.Set[Symbol]](implicit ctx => ScalaNumericValueTypes.map(_.symbol))
+ val ScalaValueClasses = new PerRun[collection.Set[Symbol]](implicit ctx => ScalaValueTypes.map(_.symbol))
+ val ScalaBoxedClasses = new PerRun[collection.Set[Symbol]](implicit ctx => ScalaBoxedTypes.map(_.symbol))
+
+ private val boxedTypes = mutable.Map[TypeName, TypeRef]()
+ private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]()
+
+// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]()
+// private val javaTypeToValueTypeRef = mutable.Map[Class[_], TypeRef]()
+// private val valueTypeNameToJavaType = mutable.Map[TypeName, Class[_]]()
+
+ private def valueTypeRef(name: String, boxed: TypeRef, jtype: Class[_], enc: Int): TypeRef = {
+ val vcls = ctx.requiredClassRef(name)
+ boxedTypes(vcls.name) = boxed
+ valueTypeEnc(vcls.name) = enc
+// unboxedTypeRef(boxed.name) = vcls
+// javaTypeToValueTypeRef(jtype) = vcls
+// valueTypeNameToJavaType(vcls.name) = jtype
+ vcls
+ }
+
+ /** The type of the boxed class corresponding to primitive value type `tp`. */
+ def boxedType(tp: Type)(implicit ctx: Context): TypeRef = boxedTypes(scalaClassName(tp))
+
+ def wrapArrayMethodName(elemtp: Type): TermName = {
+ val cls = elemtp.classSymbol
+ if (cls.isPrimitiveValueClass) nme.wrapXArray(cls.name)
+ else if (cls.derivesFrom(ObjectClass) && !cls.isPhantomClass) nme.wrapRefArray
+ else nme.genericWrapArray
+ }
+
+ type PrimitiveClassEnc = Int
+
+ val ByteEnc = 2
+ val ShortEnc = ByteEnc * 3
+ val CharEnc = 5
+ val IntEnc = ShortEnc * CharEnc
+ val LongEnc = IntEnc * 7
+ val FloatEnc = LongEnc * 11
+ val DoubleEnc = FloatEnc * 13
+ val BooleanEnc = 17
+ val UnitEnc = 19
+
+ def isValueSubType(tref1: TypeRef, tref2: TypeRef)(implicit ctx: Context) =
+ valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0
+ def isValueSubClass(sym1: Symbol, sym2: Symbol) =
+ valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0
+
+ // ----- Initialization ---------------------------------------------------
+
+ /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticScalaClasses = List(
+ AnyClass,
+ AnyRefAlias,
+ RepeatedParamClass,
+ ByNameParamClass2x,
+ AnyValClass,
+ NullClass,
+ NothingClass,
+ SingletonClass,
+ EqualsPatternClass)
+
+ lazy val syntheticCoreClasses = syntheticScalaClasses ++ List(
+ EmptyPackageVal,
+ OpsPackageClass)
+
+ /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreMethods = AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod)
+
+ lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet
+
+ private[this] var _isInitialized = false
+ private def isInitialized = _isInitialized
+
+ def init()(implicit ctx: Context) = {
+ this.ctx = ctx
+ if (!_isInitialized) {
+ // force initialization of every symbol that is synthesized or hijacked by the compiler
+ val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses()
+
+ // Enter all symbols from the scalaShadowing package in the scala package
+ for (m <- ScalaShadowingPackageClass.info.decls)
+ ScalaPackageClass.enter(m)
+
+ _isInitialized = true
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
new file mode 100644
index 000000000..02d27ea33
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
@@ -0,0 +1,78 @@
+package dotty.tools.dotc
+package core
+
+import Periods._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Denotations._
+import Phases._
+import java.lang.AssertionError
+import dotty.tools.dotc.util.DotClass
+
+object DenotTransformers {
+
+ /** A transformer group contains a sequence of transformers,
+ * ordered by the phase where they apply. Transformers are added
+ * to a group via `install`.
+ */
+
+ /** A transformer transforms denotations at a given phase */
+ trait DenotTransformer extends Phase {
+
+ /** The last phase during which the transformed denotations are valid */
+ def lastPhaseId(implicit ctx: Context) = ctx.nextDenotTransformerId(id + 1)
+
+ /** The validity period of the transformer in the given context */
+ def validFor(implicit ctx: Context): Period =
+ Period(ctx.runId, id, lastPhaseId)
+
+ /** The transformation method */
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation
+ }
+
+ /** A transformer that only transforms the info field of denotations */
+ trait InfoTransformer extends DenotTransformer {
+
+ def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type
+
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = {
+ val sym = ref.symbol
+ if (sym.exists && !mayChange(sym)) ref
+ else {
+ val info1 = transformInfo(ref.info, ref.symbol)
+ if (info1 eq ref.info) ref
+ else ref match {
+ case ref: SymDenotation => ref.copySymDenotation(info = info1)
+ case _ => ref.derivedSingleDenotation(ref.symbol, info1)
+ }
+ }
+ }
+
+ /** Denotations with a symbol where `mayChange` is false are guaranteed to be
+ * unaffected by this transform, so `transformInfo` need not be run. This
+ * can save time, and more importantly, can help avoid forcing symbol completers.
+ */
+ protected def mayChange(sym: Symbol)(implicit ctx: Context): Boolean = true
+ }
+
+ /** A transformer that only transforms SymDenotations */
+ trait SymTransformer extends DenotTransformer {
+
+ def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation
+
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref match {
+ case ref: SymDenotation => transformSym(ref)
+ case _ => ref
+ }
+ }
+
+ /** A `DenotTransformer` trait that has the identity as its `transform` method.
+ * You might want to inherit from this trait so that new denotations can be
+ * installed using `installAfter` and `enteredAfter` at the end of the phase.
+ */
+ trait IdentityDenotTransformer extends DenotTransformer {
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala
new file mode 100644
index 000000000..6a39c5787
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala
@@ -0,0 +1,1217 @@
+package dotty.tools
+package dotc
+package core
+
+import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation }
+import Contexts.{Context, ContextBase}
+import Names.{Name, PreName}
+import Names.TypeName
+import StdNames._
+import Symbols.NoSymbol
+import Symbols._
+import Types._
+import Periods._
+import Flags._
+import DenotTransformers._
+import Decorators._
+import dotc.transform.Erasure
+import printing.Texts._
+import printing.Printer
+import io.AbstractFile
+import config.Config
+import util.common._
+import collection.mutable.ListBuffer
+import Decorators.SymbolIteratorDecorator
+
+/** Denotations represent the meaning of symbols and named types.
+ * The following diagram shows how the principal types of denotations
+ * and their denoting entities relate to each other. Lines ending in
+ * a down-arrow `v` are member methods. The two methods shown in the diagram are
+ * "symbol" and "deref". Both methods are parameterized by the current context,
+ * and are effectively indexed by current period.
+ *
+ *  Lines ending in a horizontal line mean subtyping (right is a subtype of left).
+ *
+ * NamedType------TermRefWithSignature
+ * | | Symbol---------ClassSymbol
+ * | | | |
+ * | denot | denot | denot | denot
+ * v v v v
+ * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation
+ * | |
+ * +-----MultiDenotation |
+ * |
+ * +--UniqueRefDenotation
+ * +--JointRefDenotation
+ *
+ * Here's a short summary of the classes in this diagram.
+ *
+ * NamedType A type consisting of a prefix type and a name, with fields
+ * prefix: Type
+ * name: Name
+ * It has two subtypes: TermRef and TypeRef
+ * TermRefWithSignature A TermRef that has in addition a signature to select an overloaded variant, with new field
+ * sig: Signature
+ * Symbol A label for a definition or declaration in one compiler run
+ * ClassSymbol A symbol representing a class
+ * Denotation The meaning of a named type or symbol during a period
+ * MultiDenotation A denotation representing several overloaded members
+ * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields
+ * symbol: Symbol
+ * info: Type
+ * UniqueRefDenotation A denotation referring to a single definition with some member type
+ * JointRefDenotation A denotation referring to a member that could resolve to several definitions
+ * SymDenotation A denotation representing a single definition with its original type, with main fields
+ * name: Name
+ * owner: Symbol
+ * flags: Flags
+ * privateWithin: Symbol
+ * annotations: List[Annotation]
+ * ClassDenotation A denotation representing a single class definition.
+ */
+object Denotations {
+
+ implicit def eqDenotation: Eq[Denotation, Denotation] = Eq
+
+ /** A denotation is the result of resolving
+ * a name (either simple identifier or select) during a given period.
+ *
+ * Denotations can be combined with `&` and `|`.
+ * & is conjunction, | is disjunction.
+ *
+ * `&` will create an overloaded denotation from two
+ * non-overloaded denotations if their signatures differ.
+ * Analogously `|` of two denotations with different signatures will give
+ * an empty denotation `NoDenotation`.
+ *
+ * A denotation might refer to `NoSymbol`. This is the case if the denotation
+ * was produced from a disjunction of two denotations with different symbols
+ * and there was no common symbol in a superclass that could substitute for
+ * both symbols. Here is an example:
+ *
+ * Say, we have:
+ *
+ * class A { def f: A }
+ * class B { def f: B }
+ * val x: A | B = if (test) new A else new B
+ * val y = x.f
+ *
+ * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`.
+ *
+   *  @param symbol  The referencing symbol, or NoSymbol if none exists
+ */
+ abstract class Denotation(val symbol: Symbol) extends util.DotClass with printing.Showable {
+
+ /** The type info of the denotation, exists only for non-overloaded denotations */
+ def info(implicit ctx: Context): Type
+
+ /** The type info, or, if this is a SymDenotation where the symbol
+ * is not yet completed, the completer
+ */
+ def infoOrCompleter: Type
+
+ /** The period during which this denotation is valid. */
+ def validFor: Period
+
+ /** Is this a reference to a type symbol? */
+ def isType: Boolean
+
+ /** Is this a reference to a term symbol? */
+ def isTerm: Boolean = !isType
+
+ /** Is this denotation overloaded? */
+ final def isOverloaded = isInstanceOf[MultiDenotation]
+
+ /** The signature of the denotation. */
+ def signature(implicit ctx: Context): Signature
+
+ /** Resolve overloaded denotation to pick the ones with the given signature
+ * when seen from prefix `site`.
+ * @param relaxed When true, consider only parameter signatures for a match.
+ */
+ def atSignature(sig: Signature, site: Type = NoPrefix, relaxed: Boolean = false)(implicit ctx: Context): Denotation
+
+ /** The variant of this denotation that's current in the given context.
+ * If no such denotation exists, returns the denotation with each alternative
+ * at its first point of definition.
+ */
+ def current(implicit ctx: Context): Denotation
+
+ /** Is this denotation different from NoDenotation or an ErrorDenotation? */
+ def exists: Boolean = true
+
+ /** A denotation with the info of this denotation transformed using `f` */
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation
+
+ /** If this denotation does not exist, fallback to alternative */
+ final def orElse(that: => Denotation) = if (this.exists) this else that
+
+ /** The set of alternative single-denotations making up this denotation */
+ final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue)
+
+ /** The alternatives of this denotation that satisfy the predicate `p`. */
+ def altsWith(p: Symbol => Boolean): List[SingleDenotation]
+
+ /** The unique alternative of this denotation that satisfies the predicate `p`,
+ * or NoDenotation if no satisfying alternative exists.
+ * @throws TypeError if there is at more than one alternative that satisfies `p`.
+ */
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation
+
+ /** If this is a SingleDenotation, return it, otherwise throw a TypeError */
+ def checkUnique(implicit ctx: Context): SingleDenotation = suchThat(alwaysTrue)
+
+ /** Does this denotation have an alternative that satisfies the predicate `p`? */
+ def hasAltWith(p: SingleDenotation => Boolean): Boolean
+
+ /** The denotation made up from the alternatives of this denotation that
+ * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists.
+ */
+ def accessibleFrom(pre: Type, superAccess: Boolean = false)(implicit ctx: Context): Denotation
+
+ /** Find member of this denotation with given name and
+ * produce a denotation that contains the type of the member
+ * as seen from given prefix `pre`. Exclude all members that have
+ * flags in `excluded` from consideration.
+ */
+ def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation =
+ info.findMember(name, pre, excluded)
+
+ /** If this denotation is overloaded, filter with given predicate.
+ * If result is still overloaded throw a TypeError.
+ * Note: disambiguate is slightly different from suchThat in that
+ * single-denotations that do not satisfy the predicate are left alone
+ * (whereas suchThat would map them to NoDenotation).
+ */
+ def disambiguate(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation = this match {
+ case sdenot: SingleDenotation => sdenot
+ case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives)
+ }
+
+ /** Return symbol in this denotation that satisfies the given predicate.
+     *  If generateStubs is specified, return a stub symbol if the denotation is a missing ref.
+ * Throw a `TypeError` if predicate fails to disambiguate symbol or no alternative matches.
+ */
+ def requiredSymbol(p: Symbol => Boolean, source: AbstractFile = null, generateStubs: Boolean = true)(implicit ctx: Context): Symbol =
+ disambiguate(p) match {
+ case m @ MissingRef(ownerd, name) =>
+ if (generateStubs) {
+ m.ex.printStackTrace()
+ ctx.newStubSymbol(ownerd.symbol, name, source)
+ }
+ else NoSymbol
+ case NoDenotation | _: NoQualifyingRef =>
+ throw new TypeError(s"None of the alternatives of $this satisfies required predicate")
+ case denot =>
+ denot.symbol
+ }
+
+ def requiredMethod(name: PreName)(implicit ctx: Context): TermSymbol =
+ info.member(name.toTermName).requiredSymbol(_ is Method).asTerm
+ def requiredMethodRef(name: PreName)(implicit ctx: Context): TermRef =
+ requiredMethod(name).termRef
+
+ def requiredMethod(name: PreName, argTypes: List[Type])(implicit ctx: Context): TermSymbol =
+ info.member(name.toTermName).requiredSymbol(x=>
+ (x is Method) && x.info.paramTypess == List(argTypes)
+ ).asTerm
+ def requiredMethodRef(name: PreName, argTypes: List[Type])(implicit ctx: Context): TermRef =
+ requiredMethod(name, argTypes).termRef
+
+ def requiredValue(name: PreName)(implicit ctx: Context): TermSymbol =
+ info.member(name.toTermName).requiredSymbol(_.info.isParameterless).asTerm
+ def requiredValueRef(name: PreName)(implicit ctx: Context): TermRef =
+ requiredValue(name).termRef
+
+ def requiredClass(name: PreName)(implicit ctx: Context): ClassSymbol =
+ info.member(name.toTypeName).requiredSymbol(_.isClass).asClass
+
+ /** The alternative of this denotation that has a type matching `targetType` when seen
+ * as a member of type `site`, `NoDenotation` if none exists.
+ */
+ def matchingDenotation(site: Type, targetType: Type)(implicit ctx: Context): SingleDenotation = {
+ def qualifies(sym: Symbol) = site.memberInfo(sym).matchesLoosely(targetType)
+ if (isOverloaded) {
+ atSignature(targetType.signature, site, relaxed = true) match {
+ case sd: SingleDenotation => sd.matchingDenotation(site, targetType)
+ case md => md.suchThat(qualifies(_))
+ }
+ }
+ else if (exists && !qualifies(symbol)) NoDenotation
+ else asSingleDenotation
+ }
+
+ /** Handle merge conflict by throwing a `MergeError` exception */
+ private def mergeConflict(tp1: Type, tp2: Type)(implicit ctx: Context): Type = {
+ def showType(tp: Type) = tp match {
+ case ClassInfo(_, cls, _, _, _) => cls.showLocated
+ case bounds: TypeBounds => i"type bounds $bounds"
+ case _ => tp.show
+ }
+ if (true) throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2)
+ else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") // flip condition for debugging
+ }
+
+ /** Merge two lists of names. If names in corresponding positions match, keep them,
+ * otherwise generate new synthetic names.
+ */
+ def mergeNames[N <: Name](names1: List[N], names2: List[N], syntheticName: Int => N): List[N] = {
+ for ((name1, name2, idx) <- (names1, names2, 0 until names1.length).zipped)
+ yield if (name1 == name2) name1 else syntheticName(idx)
+ }.toList
+
+ /** Form a denotation by conjoining with denotation `that`.
+ *
+ * NoDenotations are dropped. MultiDenotations are handled by merging
+ * parts with same signatures. SingleDenotations with equal signatures
+ * are joined as follows:
+ *
+ * In a first step, consider only those denotations which have symbols
+ * that are accessible from prefix `pre`.
+ *
+ * If there are several such denotations, try to pick one by applying the following
+ * three precedence rules in decreasing order of priority:
+ *
+ * 1. Prefer denotations with more specific infos.
+ * 2. If infos are equally specific, prefer denotations with concrete symbols over denotations
+ * with abstract symbols.
+ * 3. If infos are equally specific and symbols are equally concrete,
+ * prefer denotations with symbols defined in subclasses
+ * over denotations with symbols defined in proper superclasses.
+ *
+ * If there is exactly one (preferred) accessible denotation, return it.
+ *
+ * If there is no preferred accessible denotation, return a JointRefDenotation
+ * with one of the operand symbols (unspecified which one), and an info which
+ * is the intersection (using `&` or `safe_&` if `safeIntersection` is true)
+ * of the infos of the operand denotations.
+ *
+ * If SingleDenotations with different signatures are joined, return NoDenotation.
+ */
+ def & (that: Denotation, pre: Type, safeIntersection: Boolean = false)(implicit ctx: Context): Denotation = {
+
+ /** Normally, `tp1 & tp2`. Special cases for matching methods and classes, with
+ * the possibility of raising a merge error.
+ */
+ def infoMeet(tp1: Type, tp2: Type): Type = {
+ if (tp1 eq tp2) tp1
+ else tp1 match {
+ case tp1: TypeBounds =>
+ tp2 match {
+ case tp2: TypeBounds => if (safeIntersection) tp1 safe_& tp2 else tp1 & tp2
+ case tp2: ClassInfo if tp1 contains tp2 => tp2
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix)
+ case tp2: TypeBounds if tp2 contains tp1 => tp1
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1 @ MethodType(names1, formals1) if isTerm =>
+ tp2 match {
+ case tp2 @ MethodType(names2, formals2) if ctx.typeComparer.matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit =>
+ tp1.derivedMethodType(
+ mergeNames(names1, names2, nme.syntheticParamName),
+ formals1,
+ infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1)))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case tp1: PolyType if isTerm =>
+ tp2 match {
+ case tp2: PolyType if ctx.typeComparer.matchingTypeParams(tp1, tp2) =>
+ tp1.derivedPolyType(
+ mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
+ tp1.paramBounds,
+ infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1)))
+ case _ =>
+ // Was `case _: MethodicType =>`, which left the match non-exhaustive:
+ // a non-methodic `tp2` (e.g. TypeBounds) would escape as a MatchError
+ // instead of a MergeError. Now consistent with the MethodType case above
+ // and with the corresponding PolyType case in `infoJoin` below.
+ mergeConflict(tp1, tp2)
+ }
+ case _ =>
+ tp1 & tp2
+ }
+ }
+
+ /** Try to merge denot1 and denot2 without adding a new signature. */
+ def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match {
+ case denot1 @ MultiDenotation(denot11, denot12) =>
+ // Try to fold `denot2` into either branch of the multi-denotation.
+ val d1 = mergeDenot(denot11, denot2)
+ if (d1.exists) denot1.derivedMultiDenotation(d1, denot12)
+ else {
+ val d2 = mergeDenot(denot12, denot2)
+ if (d2.exists) denot1.derivedMultiDenotation(denot11, d2)
+ else NoDenotation
+ }
+ case denot1: SingleDenotation =>
+ if (denot1 eq denot2) denot1
+ else if (denot1.matches(denot2)) mergeSingleDenot(denot1, denot2)
+ else NoDenotation
+ }
+
+ /** Try to merge single-denotations. */
+ def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): SingleDenotation = {
+ val info1 = denot1.info
+ val info2 = denot2.info
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+
+ val sym2Accessible = sym2.isAccessibleFrom(pre)
+
+ /** Does `sym1` come before `sym2` in the linearization of `pre`? */
+ def precedes(sym1: Symbol, sym2: Symbol) = {
+ def precedesIn(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 => (sym1 eq bc) || !(sym2 eq bc) && precedesIn(bcs1)
+ case Nil => true
+ }
+ (sym1 ne sym2) &&
+ (sym1.derivesFrom(sym2) ||
+ !sym2.derivesFrom(sym1) && precedesIn(pre.baseClasses))
+ }
+
+ /** Similar to SymDenotation#accessBoundary, but without the special cases. */
+ def accessBoundary(sym: Symbol) =
+ if (sym.is(Private)) sym.owner
+ else sym.privateWithin.orElse(
+ if (sym.is(Protected)) sym.owner.enclosingPackageClass
+ else defn.RootClass)
+
+ /** Establish a partial order "preference" order between symbols.
+ * Give preference to `sym1` over `sym2` if one of the following
+ * conditions holds, in decreasing order of weight:
+ * 1. sym1 is concrete and sym2 is abstract
+ * 2. The owner of sym1 comes before the owner of sym2 in the linearization
+ * of the type of the prefix `pre`.
+ * 3. The access boundary of sym2 is properly contained in the access
+ * boundary of sym1. For protected access, we count the enclosing
+ * package as access boundary.
+ * 4. sym1 a method but sym2 is not.
+ * The aim of these criteria is to give some disambiguation on access which
+ * - does not depend on textual order or other arbitrary choices
+ * - minimizes raising of doubleDef errors
+ */
+ def preferSym(sym1: Symbol, sym2: Symbol) =
+ sym1.eq(sym2) ||
+ sym1.isAsConcrete(sym2) &&
+ (!sym2.isAsConcrete(sym1) ||
+ precedes(sym1.owner, sym2.owner) ||
+ accessBoundary(sym2).isProperlyContainedIn(accessBoundary(sym1)) ||
+ sym1.is(Method) && !sym2.is(Method)) ||
+ sym1.info.isErroneous
+
+ /** Sym preference provided types also override */
+ def prefer(sym1: Symbol, sym2: Symbol, info1: Type, info2: Type) =
+ preferSym(sym1, sym2) && info1.overrides(info2)
+
+ def handleDoubleDef =
+ if (preferSym(sym1, sym2)) denot1
+ else if (preferSym(sym2, sym1)) denot2
+ else doubleDefError(denot1, denot2, pre)
+
+ if (sym2Accessible && prefer(sym2, sym1, info2, info1)) denot2
+ else {
+ val sym1Accessible = sym1.isAccessibleFrom(pre)
+ if (sym1Accessible && prefer(sym1, sym2, info1, info2)) denot1
+ else if (sym1Accessible && sym2.exists && !sym2Accessible) denot1
+ else if (sym2Accessible && sym1.exists && !sym1Accessible) denot2
+ else if (isDoubleDef(sym1, sym2)) handleDoubleDef
+ else {
+ // No preferred alternative: pick a representative symbol and intersect the infos.
+ val sym =
+ if (!sym1.exists) sym2
+ else if (!sym2.exists) sym1
+ else if (preferSym(sym2, sym1)) sym2
+ else sym1
+ val jointInfo =
+ try infoMeet(info1, info2)
+ catch {
+ case ex: MergeError =>
+ if (pre.widen.classSymbol.is(Scala2x) || ctx.scala2Mode)
+ info1 // follow Scala2 linearization -
+ // compare with way merge is performed in SymDenotation#computeMembersNamed
+ else
+ throw new MergeError(s"${ex.getMessage} as members of type ${pre.show}", ex.tp1, ex.tp2)
+ }
+ new JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor)
+ }
+ }
+ }
+
+ // Main dispatch: drop NoDenotations, merge singles, fall back to MultiDenotation.
+ if (this eq that) this
+ else if (!this.exists) that
+ else if (!that.exists) this
+ else that match {
+ case that: SingleDenotation =>
+ val r = mergeDenot(this, that)
+ if (r.exists) r else MultiDenotation(this, that)
+ case that @ MultiDenotation(denot1, denot2) =>
+ this & (denot1, pre) & (denot2, pre)
+ }
+ }
+
+ /** Form a choice between this denotation and that one.
+ * @param pre The prefix type of the members of the denotation, used
+ * to determine an accessible symbol if it exists.
+ */
+ def | (that: Denotation, pre: Type)(implicit ctx: Context): Denotation = {
+
+ /** Normally, `tp1 | tp2`. Special cases for matching methods and classes, with
+ * the possibility of raising a merge error.
+ */
+ def infoJoin(tp1: Type, tp2: Type): Type = tp1 match {
+ case tp1: TypeBounds =>
+ tp2 match {
+ case tp2: TypeBounds => tp1 | tp2
+ case tp2: ClassInfo if tp1 contains tp2 => tp1
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix | tp2.prefix)
+ case tp2: TypeBounds if tp2 contains tp1 => tp2
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1 @ MethodType(names1, formals1) =>
+ tp2 match {
+ case tp2 @ MethodType(names2, formals2)
+ if ctx.typeComparer.matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit =>
+ tp1.derivedMethodType(
+ mergeNames(names1, names2, nme.syntheticParamName),
+ formals1, tp1.resultType | tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case tp1: PolyType =>
+ tp2 match {
+ case tp2: PolyType if ctx.typeComparer.matchingTypeParams(tp1, tp2) =>
+ tp1.derivedPolyType(
+ mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
+ tp1.paramBounds, tp1.resultType | tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case _ =>
+ tp1 | tp2
+ }
+
+ // Join two single denotations with matching signatures into one denotation
+ // whose info is the join of the two infos; NoDenotation if signatures differ.
+ def unionDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation =
+ if (denot1.matches(denot2)) {
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+ val info1 = denot1.info
+ val info2 = denot2.info
+ val sameSym = sym1 eq sym2
+ // If one info subsumes the other (under frozen constraints), keep the wider side.
+ if (sameSym && (info1 frozen_<:< info2)) denot2
+ else if (sameSym && (info2 frozen_<:< info1)) denot1
+ else {
+ val jointSym =
+ if (sameSym) sym1
+ else {
+ val owner2 = if (sym2 ne NoSymbol) sym2.owner else NoSymbol
+ /** Determine a symbol which is overridden by both sym1 and sym2.
+ * Preference is given to accessible symbols.
+ */
+ def lubSym(overrides: Iterator[Symbol], previous: Symbol): Symbol =
+ if (!overrides.hasNext) previous
+ else {
+ val candidate = overrides.next
+ if (owner2 derivesFrom candidate.owner)
+ if (candidate isAccessibleFrom pre) candidate
+ else lubSym(overrides, previous orElse candidate)
+ else
+ lubSym(overrides, previous)
+ }
+ lubSym(sym1.allOverriddenSymbols, NoSymbol)
+ }
+ new JointRefDenotation(
+ jointSym, infoJoin(info1, info2), denot1.validFor & denot2.validFor)
+ }
+ }
+ else NoDenotation
+
+ // Main dispatch: distribute `|` over MultiDenotations, then union singles.
+ if (this eq that) this
+ else if (!this.exists) this
+ else if (!that.exists) that
+ else this match {
+ case denot1 @ MultiDenotation(denot11, denot12) =>
+ denot1.derivedMultiDenotation(denot11 | (that, pre), denot12 | (that, pre))
+ case denot1: SingleDenotation =>
+ that match {
+ case denot2 @ MultiDenotation(denot21, denot22) =>
+ denot2.derivedMultiDenotation(this | (denot21, pre), this | (denot22, pre))
+ case denot2: SingleDenotation =>
+ unionDenot(denot1, denot2)
+ }
+ }
+ }
+
+ /** This denotation viewed as a SingleDenotation; the caller must know it is not overloaded. */
+ final def asSingleDenotation = asInstanceOf[SingleDenotation]
+ /** This denotation viewed as a SymDenotation; the caller must know it is one. */
+ final def asSymDenotation = asInstanceOf[SymDenotation]
+
+ /** Delegate pretty-printing of this denotation to the given printer. */
+ def toText(printer: Printer): Text = printer.toText(this)
+ }
+
+ /** An overloaded denotation consisting of the alternatives of both given denotations.
+ */
+ case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol) {
+ // A multi-denotation has no single info; asking for it is an error (see multiHasNot).
+ final def infoOrCompleter = multiHasNot("info")
+ final def info(implicit ctx: Context) = infoOrCompleter
+ final def validFor = denot1.validFor & denot2.validFor
+ final def isType = false
+ final def signature(implicit ctx: Context) = Signature.OverloadedSignature
+ def atSignature(sig: Signature, site: Type, relaxed: Boolean)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.atSignature(sig, site, relaxed), denot2.atSignature(sig, site, relaxed))
+ def current(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.current, denot2.current)
+ def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
+ denot1.altsWith(p) ++ denot2.altsWith(p)
+ // Select the unique alternative satisfying `p`; if both branches still qualify,
+ // the reference is a double definition or remains ambiguous (both are errors).
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation = {
+ val sd1 = denot1.suchThat(p)
+ val sd2 = denot2.suchThat(p)
+ if (sd1.exists)
+ if (sd2.exists)
+ if (isDoubleDef(denot1.symbol, denot2.symbol)) doubleDefError(denot1, denot2)
+ else throw new TypeError(s"failure to disambiguate overloaded reference $this")
+ else sd1
+ else sd2
+ }
+ def hasAltWith(p: SingleDenotation => Boolean): Boolean =
+ denot1.hasAltWith(p) || denot2.hasAltWith(p)
+ // Drop inaccessible branches; collapse to a single branch when only one survives.
+ def accessibleFrom(pre: Type, superAccess: Boolean)(implicit ctx: Context): Denotation = {
+ val d1 = denot1 accessibleFrom (pre, superAccess)
+ val d2 = denot2 accessibleFrom (pre, superAccess)
+ if (!d1.exists) d2
+ else if (!d2.exists) d1
+ else derivedMultiDenotation(d1, d2)
+ }
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.mapInfo(f), denot2.mapInfo(f))
+ // Avoid reallocation when neither branch changed.
+ def derivedMultiDenotation(d1: Denotation, d2: Denotation) =
+ if ((d1 eq denot1) && (d2 eq denot2)) this else MultiDenotation(d1, d2)
+ override def toString = alternatives.mkString(" <and> ")
+
+ private def multiHasNot(op: String): Nothing =
+ throw new UnsupportedOperationException(
+ s"multi-denotation with alternatives $alternatives does not implement operation $op")
+ }
+
+ /** A non-overloaded denotation */
+ abstract class SingleDenotation(symbol: Symbol) extends Denotation(symbol) with PreDenotation {
+ /** Does this denotation have a unique symbol it refers to? (false e.g. for JointRefDenotation) */
+ def hasUniqueSym: Boolean
+ /** Create a denotation of the same kind with the given symbol and info. */
+ protected def newLikeThis(symbol: Symbol, info: Type): SingleDenotation
+
+ /** The signature of this denotation: NotAMethod for types and non-methodic terms. */
+ final def signature(implicit ctx: Context): Signature = {
+ if (isType) Signature.NotAMethod // don't force info if this is a type SymDenotation
+ else info match {
+ case info: MethodicType =>
+ try info.signature
+ catch { // !!! DEBUG
+ case scala.util.control.NonFatal(ex) =>
+ ctx.echo(s"cannot take signature of ${info.show}")
+ throw ex
+ }
+ case _ => Signature.NotAMethod
+ }
+ }
+
+ /** This denotation, or a fresh copy if symbol or info differ. */
+ def derivedSingleDenotation(symbol: Symbol, info: Type)(implicit ctx: Context): SingleDenotation =
+ if ((symbol eq this.symbol) && (info eq this.info)) this
+ else newLikeThis(symbol, info)
+
+ /** A copy of this denotation with `f` applied to its info. */
+ def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation =
+ derivedSingleDenotation(symbol, f(info))
+
+ /** This denotation if it exists, otherwise the (lazily evaluated) alternative. */
+ def orElse(that: => SingleDenotation) = if (this.exists) this else that
+
+ def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
+ if (exists && p(symbol)) this :: Nil else Nil
+
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation =
+ if (exists && p(symbol)) this else NoDenotation
+
+ def hasAltWith(p: SingleDenotation => Boolean): Boolean =
+ exists && p(this)
+
+ def accessibleFrom(pre: Type, superAccess: Boolean)(implicit ctx: Context): Denotation =
+ if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation
+
+ /** This denotation if its signature (as seen from `site`) matches `sig`, else NoDenotation. */
+ def atSignature(sig: Signature, site: Type, relaxed: Boolean)(implicit ctx: Context): SingleDenotation = {
+ val situated = if (site == NoPrefix) this else asSeenFrom(site)
+ val matches = sig.matchDegree(situated.signature) >=
+ (if (relaxed) Signature.ParamMatch else Signature.FullMatch)
+ if (matches) this else NoDenotation
+ }
+
+ // ------ Forming types -------------------------------------------
+
+ /** The TypeRef representing this type denotation at its original location. */
+ def typeRef(implicit ctx: Context): TypeRef =
+ TypeRef(symbol.owner.thisType, symbol.name.asTypeName, this)
+
+ /** The TermRef representing this term denotation at its original location. */
+ def termRef(implicit ctx: Context): TermRef =
+ TermRef(symbol.owner.thisType, symbol.name.asTermName, this)
+
+ /** The TermRef representing this term denotation at its original location
+ * and at signature `NotAMethod`.
+ */
+ def valRef(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(symbol.owner.thisType, symbol.name.asTermName, Signature.NotAMethod, this)
+
+ /** The TermRef representing this term denotation at its original location
+ * at the denotation's signature.
+ * @note Unlike `valRef` and `termRef`, this will force the completion of the
+ * denotation via a call to `info`.
+ */
+ def termRefWithSig(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(symbol.owner.thisType, symbol.name.asTermName, signature, this)
+
+ /** The NamedType representing this denotation at its original location.
+ * Same as either `typeRef` or `termRefWithSig` depending whether this denotes a type or not.
+ */
+ def namedType(implicit ctx: Context): NamedType =
+ if (isType) typeRef else termRefWithSig
+
+ // ------ Transformations -----------------------------------------
+
+ private[this] var myValidFor: Period = Nowhere
+
+ def validFor = myValidFor
+ def validFor_=(p: Period) =
+ myValidFor = p
+
+ /** The next SingleDenotation in this run, with wrap-around from last to first.
+ *
+ * There may be several `SingleDenotation`s with different validity
+ * representing the same underlying definition at different phases.
+ * These are called a "flock". Flock members are generated by
+ * @See current. Flock members are connected in a ring
+ * with their `nextInRun` fields.
+ *
+ * There are the following invariants concerning flock members
+ *
+ * 1) validity periods are non-overlapping
+ * 2) the union of all validity periods is a contiguous
+ * interval.
+ */
+ protected var nextInRun: SingleDenotation = this
+
+ /** The version of this SingleDenotation that was valid in the first phase
+ * of this run.
+ */
+ def initial: SingleDenotation =
+ if (validFor == Nowhere) this
+ else {
+ // Walk the ring until validity stops decreasing: that member is the earliest.
+ var current = nextInRun
+ while (current.validFor.code > this.myValidFor.code) current = current.nextInRun
+ current
+ }
+
+ /** All flock members, in ring order starting from `initial`. */
+ def history: List[SingleDenotation] = {
+ val b = new ListBuffer[SingleDenotation]
+ var current = initial
+ do {
+ b += (current)
+ current = current.nextInRun
+ }
+ while (current ne initial)
+ b.toList
+ }
+
+ /** Invalidate all caches and fields that depend on base classes and their contents */
+ def invalidateInheritedInfo(): Unit = ()
+
+ /** Move validity period of this denotation to a new run. Throw a StaleSymbol error
+ * if denotation is no longer valid.
+ */
+ private def bringForward()(implicit ctx: Context): SingleDenotation = this match {
+ case denot: SymDenotation if ctx.stillValid(denot) =>
+ assert(ctx.runId > validFor.runId || ctx.settings.YtestPickler.value, // mixing test pickler with debug printing can travel back in time
+ s"denotation $denot invalid in run ${ctx.runId}. ValidFor: $validFor")
+ // Re-stamp every flock member with the new runId, keeping its phase interval.
+ var d: SingleDenotation = denot
+ do {
+ d.validFor = Period(ctx.period.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId)
+ d.invalidateInheritedInfo()
+ d = d.nextInRun
+ } while (d ne denot)
+ this
+ case _ =>
+ if (coveredInterval.containsPhaseId(ctx.phaseId)) {
+ if (ctx.debug) ctx.traceInvalid(this)
+ staleSymbolError
+ }
+ else NoDenotation
+ }
+
+ /** Produce a denotation that is valid for the given context.
+ * Usually called when !(validFor contains ctx.period)
+ * (even though this is not a precondition).
+ * If the runId of the context is the same as runId of this denotation,
+ * the right flock member is located, or, if it does not exist yet,
+ * created by invoking a transformer (@See Transformers).
+ * If the runId's differ, but this denotation is a SymDenotation
+ * and its toplevel owner class or module
+ * is still a member of its enclosing package, then the whole flock
+ * is brought forward to be valid in the new runId. Otherwise
+ * the symbol is stale, which constitutes an internal error.
+ */
+ def current(implicit ctx: Context): SingleDenotation = {
+ val currentPeriod = ctx.period
+ val valid = myValidFor
+ if (valid.code <= 0) {
+ // can happen if we sit on a stale denotation which has been replaced
+ // wholesale by an installAfter; in this case, proceed to the next
+ // denotation and try again.
+ if (validFor == Nowhere && nextInRun.validFor != Nowhere) return nextInRun.current
+ // NOTE(review): if assertions are elided, control falls through here
+ // with an invalid period; presumably this state is never reached in
+ // practice -- confirm before relying on it.
+ assert(false)
+ }
+
+ if (valid.runId != currentPeriod.runId)
+ if (exists) initial.bringForward.current
+ else this
+ else {
+ var cur = this
+ if (currentPeriod.code > valid.code) {
+ // search for containing period as long as nextInRun increases.
+ var next = nextInRun
+ while (next.validFor.code > valid.code && !(next.validFor contains currentPeriod)) {
+ cur = next
+ next = next.nextInRun
+ }
+ if (next.validFor.code > valid.code) {
+ // in this case, next.validFor contains currentPeriod
+ cur = next
+ cur
+ } else {
+ //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod")
+ // not found, cur points to highest existing variant
+ val nextTransformerId = ctx.nextDenotTransformerId(cur.validFor.lastPhaseId)
+ if (currentPeriod.lastPhaseId <= nextTransformerId)
+ cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId)
+ else {
+ var startPid = nextTransformerId + 1
+ val transformer = ctx.denotTransformers(nextTransformerId)
+ //println(s"transforming $this with $transformer")
+ try {
+ next = transformer.transform(cur)(ctx.withPhase(transformer)).syncWithParents
+ } catch {
+ case ex: CyclicReference =>
+ println(s"error while transforming $this") // DEBUG
+ throw ex
+ }
+ if (next eq cur)
+ startPid = cur.validFor.firstPhaseId
+ else {
+ next match {
+ case next: ClassDenotation =>
+ assert(!next.is(Package), s"illegal transformation of package denotation by transformer ${ctx.withPhase(transformer).phase}")
+ next.resetFlag(Frozen)
+ case _ =>
+ }
+ next.insertAfter(cur)
+ cur = next
+ }
+ cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId)
+ //printPeriods(cur)
+ //println(s"new denot: $cur, valid for ${cur.validFor}")
+ }
+ cur.current // multiple transformations could be required
+ }
+ } else {
+ // currentPeriod < end of valid; in this case a version must exist
+ // but to be defensive we check for infinite loop anyway
+ var cnt = 0
+ while (!(cur.validFor contains currentPeriod)) {
+ //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}")
+ cur = cur.nextInRun
+ // Note: One might be tempted to add a `prev` field to get to the new denotation
+ // more directly here. I tried that, but it degrades rather than improves
+ // performance: Test setup: Compile everything in dotc and immediate subdirectories
+ // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without.
+ cnt += 1
+ if (cnt > MaxPossiblePhaseId)
+ return current(ctx.withPhase(coveredInterval.firstPhaseId))
+ }
+ cur
+ }
+ }
+ }
+
+ /** Error-message fragment listing all validity periods of this flock. */
+ private def demandOutsideDefinedMsg(implicit ctx: Context): String =
+ s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}"
+
+ /** Install this denotation to be the result of the given denotation transformer.
+ * This is the implementation of the same-named method in SymDenotations.
+ * It's placed here because it needs access to private fields of SingleDenotation.
+ * @pre Can only be called in `phase.next`.
+ */
+ protected def installAfter(phase: DenotTransformer)(implicit ctx: Context): Unit = {
+ val targetId = phase.next.id
+ if (ctx.phaseId != targetId) installAfter(phase)(ctx.withPhase(phase.next))
+ else {
+ val current = symbol.current
+ // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}")
+ // printPeriods(current)
+ this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId)
+ if (current.validFor.firstPhaseId >= targetId)
+ insertInsteadOf(current)
+ else {
+ // Split the period: `current` keeps phases before targetId, we take the rest.
+ current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1)
+ insertAfter(current)
+ }
+ // printPeriods(this)
+ }
+ }
+
+ /** Apply a transformation `f` to all denotations in this group that start at or after
+ * given phase. Denotations are replaced while keeping the same validity periods.
+ */
+ protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(implicit ctx: Context): Unit = {
+ var current = symbol.current
+ // Skip ahead to the first member starting at or after `phase`.
+ while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code))
+ current = current.nextInRun
+ var hasNext = true
+ while ((current.validFor.firstPhaseId >= phase.id) && hasNext) {
+ val current1: SingleDenotation = f(current.asSymDenotation)
+ if (current1 ne current) {
+ current1.validFor = current.validFor
+ current1.insertInsteadOf(current)
+ }
+ hasNext = current1.nextInRun.validFor.code > current1.validFor.code
+ current = current1.nextInRun
+ }
+ }
+
+ /** Insert this denotation so that it follows `prev`. */
+ private def insertAfter(prev: SingleDenotation) = {
+ this.nextInRun = prev.nextInRun
+ prev.nextInRun = this
+ }
+
+ /** Insert this denotation instead of `old`.
+ * Also ensure that `old` refers with `nextInRun` to this denotation
+ * and set its `validFor` field to `NoWhere`. This is necessary so that
+ * references to the old denotation can be brought forward via `current`
+ * to a valid denotation.
+ *
+ * The code to achieve this is subtle in that it works correctly
+ * whether the replaced denotation is the only one in its cycle or not.
+ */
+ private def insertInsteadOf(old: SingleDenotation): Unit = {
+ var prev = old
+ while (prev.nextInRun ne old) prev = prev.nextInRun
+ // order of next two assignments is important!
+ prev.nextInRun = this
+ this.nextInRun = old.nextInRun
+ old.validFor = Nowhere
+ }
+
+ /** Throw a StaleSymbol exception describing this denotation and where it was valid. */
+ def staleSymbolError(implicit ctx: Context) = {
+ def ownerMsg = this match {
+ case denot: SymDenotation => s"in ${denot.owner}"
+ case _ => ""
+ }
+ def msg = s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}"
+ throw new StaleSymbol(msg)
+ }
+
+ /** The period (interval of phases) for which there exists
+ * a valid denotation in this flock.
+ */
+ def coveredInterval(implicit ctx: Context): Period = {
+ var cur = this
+ var cnt = 0
+ var interval = validFor
+ do {
+ cur = cur.nextInRun
+ cnt += 1
+ assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg)
+ interval |= cur.validFor
+ } while (cur ne this)
+ interval
+ }
+
+ /** For ClassDenotations only:
+ * If caches influenced by parent classes are still valid, the denotation
+ * itself, otherwise a freshly initialized copy.
+ */
+ def syncWithParents(implicit ctx: Context): SingleDenotation = this
+
+ /** Show declaration string; useful for showing declarations
+ * as seen from subclasses.
+ */
+ def showDcl(implicit ctx: Context): String = ctx.dclText(this).show
+
+ override def toString =
+ if (symbol == NoSymbol) symbol.toString
+ else s"<SingleDenotation of type $infoOrCompleter>"
+
+ /** All validity periods of this flock, for diagnostics; truncated after MaxPossiblePhaseId. */
+ def definedPeriodsString: String = {
+ var sb = new StringBuilder()
+ var cur = this
+ var cnt = 0
+ do {
+ sb.append(" " + cur.validFor)
+ cur = cur.nextInRun
+ cnt += 1
+ if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this }
+ } while (cur ne this)
+ sb.toString
+ }
+
+ // ------ PreDenotation ops ----------------------------------------------
+
+ // A single denotation is a one-element group: it is its own first and last member.
+ final def first = this
+ final def last = this
+ final def toDenot(pre: Type)(implicit ctx: Context): Denotation = this
+ final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym)
+ final def matches(other: SingleDenotation)(implicit ctx: Context): Boolean = {
+ val d = signature.matchDegree(other.signature)
+ d == Signature.FullMatch ||
+ d >= Signature.ParamMatch && info.matches(other.info)
+ }
+ final def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation =
+ if (p(this)) this else NoDenotation
+ final def filterDisjoint(denots: PreDenotation)(implicit ctx: Context): SingleDenotation =
+ if (denots.exists && denots.matches(this)) NoDenotation else this
+ def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(implicit ctx: Context): SingleDenotation =
+ if (hasUniqueSym && prevDenots.containsSym(symbol)) NoDenotation
+ else if (isType) filterDisjoint(ownDenots).asSeenFrom(pre)
+ else asSeenFrom(pre).filterDisjoint(ownDenots)
+ final def filterExcluded(excluded: FlagSet)(implicit ctx: Context): SingleDenotation =
+ if (excluded.isEmpty || !(this overlaps excluded)) this else NoDenotation
+
+ type AsSeenFromResult = SingleDenotation
+ /** This denotation with its info mapped through asSeenFrom(pre, owner),
+ * unless the owner's members do not need the substitution.
+ */
+ protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = {
+ val symbol = this.symbol
+ val owner = this match {
+ case thisd: SymDenotation => thisd.owner
+ case _ => if (symbol.exists) symbol.owner else NoSymbol
+ }
+ if (!owner.membersNeedAsSeenFrom(pre)) this
+ else derivedSingleDenotation(symbol, info.asSeenFrom(pre, owner))
+ }
+
+ /** Do this denotation's flags intersect `fs`? Avoids forcing via symbol when possible. */
+ private def overlaps(fs: FlagSet)(implicit ctx: Context): Boolean = this match {
+ case sd: SymDenotation => sd is fs
+ case _ => symbol is fs
+ }
+ }
+
+ /** Base class for single denotations that are not SymDenotations:
+ * their info is supplied directly at construction.
+ */
+ abstract class NonSymSingleDenotation(symbol: Symbol) extends SingleDenotation(symbol) {
+ /** The type (or completer) carried by this denotation. */
+ def infoOrCompleter: Type
+ /** Info is immediately available; no completion step is involved. */
+ def info(implicit ctx: Context) = infoOrCompleter
+ /** This is a type denotation exactly when its info is a TypeType. */
+ def isType = infoOrCompleter match {
+ case _: TypeType => true
+ case _ => false
+ }
+ }
+
+ /** A denotation that refers to a single, fixed symbol (hasUniqueSym = true). */
+ class UniqueRefDenotation(
+ symbol: Symbol,
+ val infoOrCompleter: Type,
+ initValidFor: Period) extends NonSymSingleDenotation(symbol) {
+ validFor = initValidFor
+ override def hasUniqueSym: Boolean = true
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new UniqueRefDenotation(s, i, validFor)
+ }
+
+ /** A denotation produced by merging alternatives (see `&`/`|`): its symbol is
+ * only a representative of the merged parts, so hasUniqueSym = false.
+ */
+ class JointRefDenotation(
+ symbol: Symbol,
+ val infoOrCompleter: Type,
+ initValidFor: Period) extends NonSymSingleDenotation(symbol) {
+ validFor = initValidFor
+ override def hasUniqueSym = false
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new JointRefDenotation(s, i, validFor)
+ }
+
+ /** A non-existing denotation signalling an error; valid for the whole current run. */
+ class ErrorDenotation(implicit ctx: Context) extends NonSymSingleDenotation(NoSymbol) {
+ override def exists = false
+ override def hasUniqueSym = false
+ def infoOrCompleter = NoType
+ validFor = Period.allInRun(ctx.runId)
+ // Error denotations are never rewritten; copies are the denotation itself.
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = this
+ }
+
+ /** An error denotation that provides more info about the missing reference.
+ * Produced by staticRef, consumed by requiredSymbol.
+ */
+ case class MissingRef(val owner: SingleDenotation, name: Name)(implicit ctx: Context) extends ErrorDenotation {
+ // Captures a stack trace at the point of creation -- presumably to help
+ // debug where the missing reference originated; TODO confirm intended use.
+ val ex: Exception = new Exception
+ }
+
+ /** An error denotation that provides more info about alternatives
+ * that were found but that do not qualify.
+ * Produced by staticRef, consumed by requiredSymbol.
+ */
+ case class NoQualifyingRef(alts: List[SingleDenotation])(implicit ctx: Context) extends ErrorDenotation
+
+ /** Do `sym1` and `sym2` constitute a double definition?
+ * True only for two distinct, existing symbols that share the same owner,
+ * neither of which is a synthetic bridge.
+ */
+ def isDoubleDef(sym1: Symbol, sym2: Symbol)(implicit ctx: Context): Boolean =
+ if (!sym1.exists || !sym2.exists) false
+ else if (sym1 eq sym2) false
+ else if (sym1.owner ne sym2.owner) false
+ else !sym1.is(Bridge) && !sym2.is(Bridge)
+
+ /** Report a double definition of `denot1` and `denot2` by throwing a MergeError
+ * carrying both conflicting infos.
+ * @param pre if not NoPrefix, the prefix type as whose members the conflict was seen.
+ */
+ def doubleDefError(denot1: Denotation, denot2: Denotation, pre: Type = NoPrefix)(implicit ctx: Context): Nothing = {
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+ def fromWhere = if (pre == NoPrefix) "" else i"\nwhen seen as members of $pre"
+ throw new MergeError(
+ i"""cannot merge
+ | $sym1: ${sym1.info} and
+ | $sym2: ${sym2.info};
+ |they are both defined in ${sym1.owner} but have matching signatures
+ | ${denot1.info} and
+ | ${denot2.info}$fromWhere""",
+ // Fix: was `denot2.info, denot2.info` -- the MergeError constructor takes
+ // both conflicting types (cf. mergeConflict passing tp1, tp2), so the first
+ // argument must be denot1's info.
+ denot1.info, denot2.info)
+ }
+
+ // --------------- PreDenotations -------------------------------------------------
+
+ /** A PreDenotation represents a group of single denotations
+ * It is used as an optimization to avoid forming MultiDenotations too eagerly.
+ */
+ trait PreDenotation {
+
+ /** A denotation in the group exists */
+ def exists: Boolean
+
+ /** First/last denotation in the group */
+ def first: Denotation
+ def last: Denotation
+
+ /** Convert to full denotation by &-ing all elements */
+ def toDenot(pre: Type)(implicit ctx: Context): Denotation
+
+ /** Group contains a denotation that refers to given symbol */
+ def containsSym(sym: Symbol): Boolean
+
+ /** Group contains a denotation with given signature */
+ def matches(other: SingleDenotation)(implicit ctx: Context): Boolean
+
+ /** Keep only those denotations in this group which satisfy predicate `p`. */
+ def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation
+
+ /** Keep only those denotations in this group which have a signature
+ * that's not already defined by `denots`.
+ */
+ def filterDisjoint(denots: PreDenotation)(implicit ctx: Context): PreDenotation
+
+ /** Keep only those inherited members M of this predenotation for which the following is true
+ * - M is not marked Private
+ * - If M has a unique symbol, it does not appear in `prevDenots`.
+ * - M's signature as seen from prefix `pre` does not appear in `ownDenots`
+ * Return the denotation as seen from `pre`.
+ * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in
+ * the base class, which shadow any inherited denotations with the same signature.
+ * `prevDenots` are the denotations that are defined in the class or inherited from
+ * a base type which comes earlier in the linearization.
+ */
+ def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(implicit ctx: Context): PreDenotation
+
+ /** Keep only those denotations in this group whose flags do not intersect
+ * with `excluded`.
+ */
+ def filterExcluded(excluded: FlagSet)(implicit ctx: Context): PreDenotation
+
+ // One-slot cache for asSeenFrom, keyed on the prefix and the current period.
+ private var cachedPrefix: Type = _
+ private var cachedAsSeenFrom: AsSeenFromResult = _
+ private var validAsSeenFrom: Period = Nowhere
+ type AsSeenFromResult <: PreDenotation
+
+ /** The denotation with info(s) as seen from prefix type */
+ final def asSeenFrom(pre: Type)(implicit ctx: Context): AsSeenFromResult =
+ if (Config.cacheAsSeenFrom) {
+ // Recompute when the prefix is a different object or the period changed.
+ if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) {
+ cachedAsSeenFrom = computeAsSeenFrom(pre)
+ cachedPrefix = pre
+ validAsSeenFrom = ctx.period
+ }
+ cachedAsSeenFrom
+ } else computeAsSeenFrom(pre)
+
+ protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): AsSeenFromResult
+
+ /** The union of two groups. */
+ def union(that: PreDenotation) =
+ if (!this.exists) that
+ else if (!that.exists) this
+ else DenotUnion(this, that)
+ }
+
+ /** The union of two non-empty groups of denotations; most operations distribute over both halves. */
+ final case class DenotUnion(denots1: PreDenotation, denots2: PreDenotation) extends PreDenotation {
+ assert(denots1.exists && denots2.exists, s"Union of non-existing denotations ($denots1) and ($denots2)")
+ def exists = true
+ def first = denots1.first
+ def last = denots2.last
+ def toDenot(pre: Type)(implicit ctx: Context) =
+ (denots1 toDenot pre) & (denots2 toDenot pre, pre)
+ def containsSym(sym: Symbol) =
+ (denots1 containsSym sym) || (denots2 containsSym sym)
+ def matches(other: SingleDenotation)(implicit ctx: Context): Boolean =
+ denots1.matches(other) || denots2.matches(other)
+ def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation =
+ derivedUnion(denots1 filterWithPredicate p, denots2 filterWithPredicate p)
+ def filterDisjoint(denots: PreDenotation)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1 filterDisjoint denots, denots2 filterDisjoint denots)
+ def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1.mapInherited(ownDenots, prevDenots, pre), denots2.mapInherited(ownDenots, prevDenots, pre))
+ def filterExcluded(excluded: FlagSet)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1.filterExcluded(excluded), denots2.filterExcluded(excluded))
+
+ type AsSeenFromResult = PreDenotation
+ protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1.asSeenFrom(pre), denots2.asSeenFrom(pre))
+ // Avoid allocating a new union node when both halves are unchanged (reference equality).
+ private def derivedUnion(denots1: PreDenotation, denots2: PreDenotation) =
+ if ((denots1 eq this.denots1) && (denots2 eq this.denots2)) this
+ else denots1 union denots2
+ }
+
+ // --------------- Context Base Trait -------------------------------
+
+ trait DenotationsBase { this: ContextBase =>
+
+ /** The current denotation of the static reference given by path,
+ * or a MissingRef or NoQualifyingRef instance, if it does not exist.
+ * if generateStubs is set, generates stubs for missing top-level symbols
+ */
+ def staticRef(path: Name, generateStubs: Boolean = true)(implicit ctx: Context): Denotation = {
+ // Resolve the prefix of `path` up to `len` recursively: first resolve the owner
+ // (everything before the last '.'), then look up the final selector in the owner.
+ def recur(path: Name, len: Int): Denotation = {
+ val point = path.lastIndexOf('.', len - 1)
+ val owner =
+ if (point > 0) recur(path.toTermName, point).disambiguate(_.info.isParameterless)
+ else if (path.isTermName) defn.RootClass.denot
+ else defn.EmptyPackageClass.denot
+ if (owner.exists) {
+ val name = path slice (point + 1, len)
+ val result = owner.info.member(name)
+ if (result ne NoDenotation) result
+ else {
+ // Member not found: optionally synthesize a stub symbol via `missingHook`.
+ val alt =
+ if (generateStubs) missingHook(owner.symbol.moduleClass, name)
+ else NoSymbol
+ if (alt.exists) alt.denot
+ else MissingRef(owner, name)
+ }
+ }
+ else owner
+ }
+ recur(path, path.length)
+ }
+
+ /** If we are looking for a non-existing term name in a package,
+ * assume it is a package for which we do not have a directory and
+ * enter it.
+ */
+ def missingHook(owner: Symbol, name: Name)(implicit ctx: Context): Symbol =
+ if ((owner is Package) && name.isTermName)
+ ctx.newCompletePackageSymbol(owner, name.asTermName).entered
+ else
+ NoSymbol
+ }
+
+ /** An exception for accessing symbols that are no longer valid in current run */
+ class StaleSymbol(msg: => String) extends Exception {
+ util.Stats.record("stale symbol")
+ // `msg` is by-name so the message string is only computed if the exception is reported.
+ override def getMessage() = msg
+ }
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala
new file mode 100644
index 000000000..63fbc98dc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Flags.scala
@@ -0,0 +1,640 @@
+package dotty.tools.dotc.core
+
+import language.implicitConversions
+
+object Flags {
+
+ /** A FlagSet represents a set of flags. Flags are encoded as follows:
+ * The first two bits indicate whether a flagset applies to terms,
+ * to types, or to both. Bits 2..63 are available for properties
+ * and can be doubly used for terms and types.
+ * Combining two FlagSets with `|` will give a FlagSet
+ * that has the intersection of the applicability to terms/types
+ * of the two flag sets. It is checked that the intersection is not empty.
+ */
+ case class FlagSet(val bits: Long) extends AnyVal {
+
+ /** The union of this flag set and the given flag set
+ */
+ def | (that: FlagSet): FlagSet =
+ if (bits == 0) that
+ else if (that.bits == 0) this
+ else {
+ // Kind bits are intersected, property bits are unioned.
+ val tbits = bits & that.bits & KINDFLAGS
+ assert(tbits != 0, s"illegal flagset combination: $this and $that")
+ FlagSet(tbits | ((this.bits | that.bits) & ~KINDFLAGS))
+ }
+
+ /** The intersection of this flag set and the given flag set */
+ def & (that: FlagSet) = FlagSet(bits & that.bits)
+
+ /** The intersection of this flag set with the complement of the given flag set */
+ def &~ (that: FlagSet) = {
+ val tbits = bits & KINDFLAGS
+ // Only remove property bits if the kinds overlap; kind bits are always kept.
+ if ((tbits & that.bits) == 0) this
+ else FlagSet(tbits | ((this.bits & ~that.bits) & ~KINDFLAGS))
+ }
+
+ /** Does this flag set have a non-empty intersection with the given flag set?
+ * This means that both the kind flags and the carrier bits have non-empty intersection.
+ */
+ def is(flags: FlagSet): Boolean = {
+ val fs = bits & flags.bits
+ (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0
+ }
+
+ /** Does this flag set have a non-empty intersection with the given flag set,
+ * and at the same time contain none of the flags in the `butNot` set?
+ */
+ def is(flags: FlagSet, butNot: FlagSet): Boolean = is(flags) && !is(butNot)
+
+ /** Does this flag set have all of the flags in given flag conjunction?
+ * Pre: The intersection of the typeflags of both sets must be non-empty.
+ */
+ def is(flags: FlagConjunction): Boolean = {
+ val fs = bits & flags.bits
+ (fs & KINDFLAGS) != 0 &&
+ (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT)
+ }
+
+ /** Does this flag set have all of the flags in given flag conjunction?
+ * and at the same time contain none of the flags in the `butNot` set?
+ * Pre: The intersection of the typeflags of both sets must be non-empty.
+ */
+ def is(flags: FlagConjunction, butNot: FlagSet): Boolean = is(flags) && !is(butNot)
+
+ /** Does this flag set contain no property flags? (kind bits are ignored) */
+ def isEmpty = (bits & ~KINDFLAGS) == 0
+
+ /** Is this flag set a subset of that one? */
+ def <= (that: FlagSet) = (bits & that.bits) == bits
+
+ /** Does this flag set apply to terms? */
+ def isTermFlags = (bits & TERMS) != 0
+
+ /** Does this flag set apply to types? */
+ def isTypeFlags = (bits & TYPES) != 0
+
+ /** This flag set with all flags transposed to be type flags */
+ def toTypeFlags = if (bits == 0) this else FlagSet(bits & ~KINDFLAGS | TYPES)
+
+ /** This flag set with all flags transposed to be term flags */
+ def toTermFlags = if (bits == 0) this else FlagSet(bits & ~KINDFLAGS | TERMS)
+
+ /** This flag set with all flags transposed to be common flags */
+ def toCommonFlags = if (bits == 0) this else FlagSet(bits | KINDFLAGS)
+
+ /** The number of non-kind flags in this set */
+ def numFlags: Int = java.lang.Long.bitCount(bits & ~KINDFLAGS)
+
+ /** The lowest non-kind bit set in this flagset */
+ def firstBit: Int = java.lang.Long.numberOfTrailingZeros(bits & ~KINDFLAGS)
+
+ /** The list of non-empty names of flags with given index idx that are set in this FlagSet */
+ private def flagString(idx: Int): List[String] =
+ if ((bits & (1L << idx)) == 0) Nil
+ else {
+ def halfString(kind: Int) =
+ if ((bits & (1L << kind)) != 0) flagName(idx)(kind) else ""
+ val termFS = halfString(TERMindex)
+ val typeFS = halfString(TYPEindex)
+ // Avoid listing the same name twice when term and type names coincide.
+ val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil)
+ strs filter (_.nonEmpty)
+ }
+
+ /** The list of non-empty names of flags that are set in this FlagSet */
+ def flagStrings: Seq[String] = {
+ val rawStrings = (2 to MaxFlag).flatMap(flagString)
+ if (this is Local)
+ rawStrings.filter(_ != "<local>").map {
+ case "private" => "private[this]"
+ case "protected" => "protected[this]"
+ case str => str
+ }
+ else rawStrings
+ }
+
+ /** The string representation of this flag set */
+ override def toString = flagStrings.mkString(" ")
+ }
+
+ /** A class representing flag sets that should be tested
+ * conjunctively. I.e. for a flag conjunction `fc`,
+ * `x is fc` tests whether `x` contains all flags in `fc`.
+ */
+ case class FlagConjunction(bits: Long) {
+ override def toString = FlagSet(bits).toString
+ }
+
+ // Encoding constants: bits 0 and 1 are the kind bits (term/type applicability);
+ // bits 2..63 carry the actual flag properties.
+ private final val TYPESHIFT = 2
+ private final val TERMindex = 0
+ private final val TYPEindex = 1
+ private final val TERMS = 1 << TERMindex
+ private final val TYPES = 1 << TYPEindex
+ private final val KINDFLAGS = TERMS | TYPES
+
+ private final val FirstFlag = 2
+ private final val FirstNotPickledFlag = 48
+ private final val MaxFlag = 63
+
+ // flagName(idx)(kind) holds the display name of the flag with bit `idx` for the
+ // given kind (TERMindex/TYPEindex); installed by termFlag/typeFlag/commonFlag below.
+ private val flagName = Array.fill(64, 2)("")
+
+ private def isDefinedAsFlag(idx: Int) = flagName(idx) exists (_.nonEmpty)
+
+ /** The flag set containing all defined flags of either kind whose bits
+ * lie in the given range
+ */
+ private def flagRange(start: Int, end: Int) =
+ FlagSet((KINDFLAGS.toLong /: (start until end)) ((bits, idx) =>
+ if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits))
+
+ /** The flag with given index between 2 and 63 which applies to terms.
+ * Installs given name as the name of the flag. */
+ private def termFlag(index: Int, name: String): FlagSet = {
+ flagName(index)(TERMindex) = name
+ FlagSet(TERMS | (1L << index))
+ }
+
+ /** The flag with given index between 2 and 63 which applies to types.
+ * Installs given name as the name of the flag. */
+ private def typeFlag(index: Int, name: String): FlagSet = {
+ flagName(index)(TYPEindex) = name
+ FlagSet(TYPES | (1L << index))
+ }
+
+ /** The flag with given index between 2 and 63 which applies to both terms and types
+ * Installs given name as the name of the flag. */
+ private def commonFlag(index: Int, name: String): FlagSet = {
+ flagName(index)(TERMindex) = name
+ flagName(index)(TYPEindex) = name
+ FlagSet(TERMS | TYPES | (1L << index))
+ }
+
+ /** The union of all flags in given flag set */
+ def union(flagss: FlagSet*) = (EmptyFlags /: flagss)(_ | _)
+
+ /** The conjunction of all flags in given flag set */
+ def allOf(flagss: FlagSet*) = {
+ assert(flagss forall (_.numFlags == 1), "Flags.allOf doesn't support flag " + flagss.find(_.numFlags != 1))
+ FlagConjunction(union(flagss: _*).bits)
+ }
+
+ def commonFlags(flagss: FlagSet*) = union(flagss.map(_.toCommonFlags): _*)
+
+ /** The empty flag set */
+ final val EmptyFlags = FlagSet(0)
+
+ /** The undefined flag set */
+ final val UndefinedFlags = FlagSet(~KINDFLAGS)
+
+ // Available flags:
+
+ /** Labeled with `private` modifier */
+ final val Private = commonFlag(2, "private")
+ final val PrivateTerm = Private.toTermFlags
+ final val PrivateType = Private.toTypeFlags
+
+ /** Labeled with `protected` modifier */
+ final val Protected = commonFlag(3, "protected")
+
+ /** Labeled with `override` modifier */
+ final val Override = commonFlag(4, "override")
+
+ /** A declared, but not defined member */
+ final val Deferred = commonFlag(5, "<deferred>")
+ final val DeferredTerm = Deferred.toTermFlags
+ final val DeferredType = Deferred.toTypeFlags
+
+ /** Labeled with `final` modifier */
+ final val Final = commonFlag(6, "final")
+
+ /** A method symbol. */
+ final val MethodOrHKCommon = commonFlag(7, "<method>")
+ final val Method = MethodOrHKCommon.toTermFlags
+ final val HigherKinded = MethodOrHKCommon.toTypeFlags
+
+ /** A (term or type) parameter to a class or method */
+ final val Param = commonFlag(8, "<param>")
+ final val TermParam = Param.toTermFlags
+ final val TypeParam = Param.toTypeFlags
+
+ /** Labeled with `implicit` modifier (implicit value) */
+ final val ImplicitCommon = commonFlag(9, "implicit")
+ final val Implicit = ImplicitCommon.toTermFlags
+
+ /** Labeled with `lazy` (a lazy val). Shares bit 10 with Trait. */
+ final val Lazy = termFlag(10, "lazy")
+
+ /** A trait */
+ final val Trait = typeFlag(10, "<trait>")
+
+ final val LazyOrTrait = Lazy.toCommonFlags
+
+ /** A value or variable accessor (getter or setter). Shares bit 11 with Sealed. */
+ final val Accessor = termFlag(11, "<accessor>")
+
+ /** Labeled with `sealed` modifier (sealed class) */
+ final val Sealed = typeFlag(11, "sealed")
+
+ final val AccessorOrSealed = Accessor.toCommonFlags
+
+ /** A mutable var */
+ final val Mutable = termFlag(12, "mutable")
+
+ /** Symbol is local to current class (i.e. private[this] or protected[this])
+ * pre: Private or Protected are also set
+ */
+ final val Local = commonFlag(13, "<local>")
+
+ /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not),
+ * or an accessor of such a field.
+ */
+ final val ParamAccessor = commonFlag(14, "<paramaccessor>")
+ final val TermParamAccessor = ParamAccessor.toTermFlags
+ final val TypeParamAccessor = ParamAccessor.toTypeFlags
+
+ /** A value or class implementing a module */
+ final val Module = commonFlag(15, "module")
+ final val ModuleVal = Module.toTermFlags
+ final val ModuleClass = Module.toTypeFlags
+
+ /** A value or class representing a package */
+ final val Package = commonFlag(16, "<package>")
+ final val PackageVal = Package.toTermFlags
+ final val PackageClass = Package.toTypeFlags
+
+ /** A case class or its companion object */
+ final val Case = commonFlag(17, "case")
+ final val CaseClass = Case.toTypeFlags
+ final val CaseVal = Case.toTermFlags
+
+ /** A compiler-generated symbol, which is visible for type-checking
+ * (compare with artifact)
+ */
+ final val Synthetic = commonFlag(18, "<synthetic>")
+
+ /** Symbol's name is expanded */
+ final val ExpandedName = commonFlag(19, "<expandedname>")
+
+ /** A covariant type variable / an outer accessor */
+ final val CovariantOrOuter = commonFlag(20, "")
+ final val Covariant = typeFlag(20, "<covariant>")
+ final val OuterAccessor = termFlag(20, "<outer accessor>")
+
+ /** A contravariant type variable / a label method */
+ final val ContravariantOrLabel = commonFlag(21, "")
+ final val Contravariant = typeFlag(21, "<contravariant>")
+ final val Label = termFlag(21, "<label>")
+
+
+ /** A trait that has only abstract methods as members
+ * (and therefore can be represented by a Java interface)
+ */
+ final val PureInterface = typeFlag(22, "interface") // TODO when unpickling, reconstitute from context
+
+ /** Labeled with `abstract override` */
+ final val AbsOverride = termFlag(22, "abstract override")
+
+ /** Labeled with `abstract` modifier (an abstract class)
+ * Note: You should never see Abstract on any symbol except a class.
+ * Note: the flag counts as common, because it can be combined with OVERRIDE in a term.
+ */
+ final val Abstract = commonFlag(23, "abstract")
+
+ /** Lazy val or method is known or assumed to be stable and realizable */
+ final val Stable = termFlag(24, "<stable>")
+
+ /** A case parameter accessor */
+ final val CaseAccessor = termFlag(25, "<caseaccessor>")
+
+ /** A binding for a type parameter of a base class or trait.
+ * TODO: Replace with combination of isType, ExpandedName, and Override?
+ */
+ final val BaseTypeArg = typeFlag(25, "<basetypearg>")
+
+ final val CaseAccessorOrBaseTypeArg = CaseAccessor.toCommonFlags
+
+ /** A super accessor */
+ final val SuperAccessor = termFlag(26, "<superaccessor>")
+
+ /** An unpickled Scala 2.x class */
+ final val Scala2x = typeFlag(26, "<scala-2.x>")
+
+ final val SuperAccessorOrScala2x = SuperAccessor.toCommonFlags
+
+ /** A method that has default params */
+ final val DefaultParameterized = termFlag(27, "<defaultparam>")
+
+ /** A type that is defined by a type bind */
+ final val BindDefinedType = typeFlag(27, "<bind-defined>")
+
+ /** Symbol is inlined */
+ final val Inline = commonFlag(29, "inline")
+
+ /** Symbol is defined by a Java class */
+ final val JavaDefined = commonFlag(30, "<java>")
+
+ /** Symbol is implemented as a Java static */
+ final val JavaStatic = commonFlag(31, "<static>")
+ final val JavaStaticTerm = JavaStatic.toTermFlags
+ final val JavaStaticType = JavaStatic.toTypeFlags
+
+ /** Trait does not have fields or initialization code */
+ final val NoInits = typeFlag(32, "<noInits>")
+
+ /** Variable is accessed from nested function. */
+ final val Captured = termFlag(32, "<captured>")
+
+ /** Symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode */
+ final val Artifact = commonFlag(33, "<artifact>")
+
+ /** A bridge method. Set by Erasure */
+ final val Bridge = termFlag(34, "<bridge>")
+
+ /** All class attributes are fully defined */
+ final val FullyCompleted = typeFlag(34, "<fully-completed>")
+
+ /** Symbol is a Java varargs bridge */ // (needed?)
+ final val VBridge = termFlag(35, "<vbridge>") // TODO remove
+
+ /** Symbol is a method which should be marked ACC_SYNCHRONIZED */
+ final val Synchronized = termFlag(36, "<synchronized>")
+
+ /** Symbol is a Java-style varargs method */
+ final val JavaVarargs = termFlag(37, "<varargs>")
+
+ /** Symbol is a Java default method */
+ final val DefaultMethod = termFlag(38, "<defaultmethod>")
+
+ /** Symbol is a Java enum */
+ final val Enum = commonFlag(40, "<enum>")
+
+ // Flags following this one are not pickled
+
+ /** Symbol always defines a fresh named type */
+ final val Fresh = commonFlag(45, "<fresh>")
+
+ /** Symbol is defined in a super call */
+ final val InSuperCall = commonFlag(46, "<in supercall>")
+
+ /** Denotation is in train of being loaded and completed, used to catch cyclic dependencies */
+ final val Touched = commonFlag(48, "<touched>")
+
+ /** Class is not allowed to accept new members because fingerprint of subclass has been taken */
+ final val Frozen = commonFlag(49, "<frozen>")
+
+ /** An error symbol */
+ final val Erroneous = commonFlag(50, "<is-error>")
+
+ /** Class has been lifted out to package level, local value has been lifted out to class level */
+ final val Lifted = commonFlag(51, "<lifted>")
+
+ /** Term member has been mixed in */
+ final val MixedIn = commonFlag(52, "<mixedin>")
+
+ /** Symbol is a generated specialized member */
+ final val Specialized = commonFlag(53, "<specialized>")
+
+ /** Symbol is a self name */
+ final val SelfName = termFlag(54, "<selfname>")
+
+ /** Symbol is an implementation class of a Scala2 trait */
+ final val ImplClass = typeFlag(54, "<implclass>")
+
+ final val SelfNameOrImplClass = SelfName.toCommonFlags
+
+ /** An existentially bound symbol (Scala 2.x only) */
+ final val Scala2ExistentialCommon = commonFlag(55, "<existential>")
+ final val Scala2Existential = Scala2ExistentialCommon.toTypeFlags
+
+ /** An overloaded symbol (Scala 2.x only) */
+ final val Scala2Overloaded = termFlag(56, "<overloaded>")
+
+ /** A module variable (Scala 2.x only) */
+ final val Scala2ModuleVar = termFlag(57, "<modulevar>")
+
+ /** A definition that's initialized before the super call (Scala 2.x only) */
+ final val Scala2PreSuper = termFlag(58, "<presuper>")
+
+ /** A macro (Scala 2.x only) */
+ final val Macro = commonFlag(59, "<macro>")
+
+ /** A method that is known to have inherited default parameters */
+ final val InheritedDefaultParams = termFlag(60, "<inherited-default-param>")
+
+ /** A method that is known to have no default parameters */
+ final val NoDefaultParams = termFlag(61, "<no-default-param>")
+
+ /** A denotation that is valid in all run-ids */
+ final val Permanent = commonFlag(62, "<permanent>")
+// --------- Combined Flag Sets and Conjunctions ----------------------
+
+ /** Flags representing source modifiers */
+ final val SourceModifierFlags =
+ commonFlags(Private, Protected, Abstract, Final, Inline,
+ Sealed, Case, Implicit, Override, AbsOverride, Lazy, JavaStatic)
+
+ /** Flags representing modifiers that can appear in trees */
+ final val ModifierFlags =
+ SourceModifierFlags | Module | Param | Synthetic | Package | Local |
+ commonFlags(Mutable)
+ // | Trait is subsumed by commonFlags(Lazy) from SourceModifierFlags
+
+ assert(ModifierFlags.isTermFlags && ModifierFlags.isTypeFlags)
+
+ /** Flags representing access rights */
+ final val AccessFlags = Private | Protected | Local
+
+ /** Flags guaranteed to be set upon symbol creation */
+ final val FromStartFlags =
+ AccessFlags | Module | Package | Deferred | Final | MethodOrHKCommon | Param | ParamAccessor | Scala2ExistentialCommon |
+ Mutable.toCommonFlags | InSuperCall | Touched | JavaStatic | CovariantOrOuter | ContravariantOrLabel | ExpandedName | AccessorOrSealed |
+ CaseAccessorOrBaseTypeArg | Fresh | Frozen | Erroneous | ImplicitCommon | Permanent | Synthetic |
+ Inline | LazyOrTrait | SuperAccessorOrScala2x | SelfNameOrImplClass
+
+ assert(FromStartFlags.isTermFlags && FromStartFlags.isTypeFlags)
+ // TODO: Should check that FromStartFlags do not change in completion
+
+ /** A value that's unstable unless complemented with a Stable flag */
+ final val UnstableValue = Mutable | Method
+
+ /** Flags that express the variance of a type parameter. */
+ final val VarianceFlags = Covariant | Contravariant
+
+ /** Flags that are passed from a type parameter of a class to a refinement symbol
+ * that sets the type parameter */
+ final val RetainedTypeArgFlags = VarianceFlags | ExpandedName | Protected | Local
+
+ /** Modules always have these flags set */
+ final val ModuleCreationFlags = ModuleVal | Lazy | Final | Stable
+
+ /** Module classes always have these flags set */
+ final val ModuleClassCreationFlags = ModuleClass | Final
+
+ /** Accessors always have these flags set */
+ final val AccessorCreationFlags = Method | Accessor
+
+ /** Pure interfaces always have these flags */
+ final val PureInterfaceCreationFlags = Trait | NoInits | PureInterface
+
+ final val NoInitsInterface = NoInits | PureInterface
+
+ /** The flags of the self symbol */
+ final val SelfSymFlags = Private | Local | Deferred
+
+ /** The flags of a class type parameter */
+ final def ClassTypeParamCreationFlags = TypeParam | Deferred | Protected | Local
+
+ /** Flags that can apply to both a module val and a module class, except those that
+ * are added at creation anyway
+ */
+ final val RetainedModuleValAndClassFlags: FlagSet =
+ AccessFlags | Package | Case |
+ Synthetic | ExpandedName | JavaDefined | JavaStatic | Artifact |
+ Erroneous | Lifted | MixedIn | Specialized
+
+ /** Flags that can apply to a module val */
+ final val RetainedModuleValFlags: FlagSet = RetainedModuleValAndClassFlags |
+ Override | Final | Method | Implicit | Lazy |
+ Accessor | AbsOverride | Stable | Captured | Synchronized
+
+ /** Flags that can apply to a module class */
+ final val RetainedModuleClassFlags: FlagSet = RetainedModuleValAndClassFlags |
+ InSuperCall | ImplClass
+
+ /** Packages and package classes always have these flags set */
+ final val PackageCreationFlags =
+ Module | Package | Final | JavaDefined
+
+ /** These flags are pickled */
+ final val PickledFlags = flagRange(FirstFlag, FirstNotPickledFlag)
+
+ /** All defined flags of either kind */
+ final val AnyFlags = flagRange(FirstFlag, MaxFlag)
+
+ /** An abstract class or a trait */
+ final val AbstractOrTrait = Abstract | Trait
+
+ /** Labeled `private` or `protected[local]` */
+ final val PrivateOrLocal = Private | Local
+
+ /** Either a module or a final class */
+ final val ModuleOrFinal = ModuleClass | Final
+
+ /** Either mutable or lazy */
+ final val MutableOrLazy = Mutable | Lazy
+
+ /** Either method or lazy */
+ final val MethodOrLazy = Method | Lazy
+
+ /** Either method or lazy or deferred */
+ final val MethodOrLazyOrDeferred = Method | Lazy | Deferred
+
+ /** Labeled `private`, `final`, or `inline` */
+ final val PrivateOrFinalOrInline = Private | Final | Inline
+
+ /** A private method */
+ final val PrivateMethod = allOf(Private, Method)
+
+ /** A private accessor */
+ final val PrivateAccessor = allOf(Private, Accessor)
+
+ /** A type parameter with synthesized name */
+ final val ExpandedTypeParam = allOf(ExpandedName, TypeParam)
+
+ /** An inline method */
+ final val InlineMethod = allOf(Inline, Method)
+
+ /** An inline parameter */
+ final val InlineParam = allOf(Inline, Param)
+
+ /** A parameter or parameter accessor */
+ final val ParamOrAccessor = Param | ParamAccessor
+
+ /** A lazy or deferred value */
+ final val LazyOrDeferred = Lazy | Deferred
+
+ /** A synthetic or private definition */
+ final val SyntheticOrPrivate = Synthetic | Private
+
+ /** A type parameter or type parameter accessor */
+ final val TypeParamOrAccessor = TypeParam | TypeParamAccessor
+
+ /** A deferred member or a parameter accessor (these don't have right hand sides) */
+ final val DeferredOrParamAccessor = Deferred | ParamAccessor
+
+ /** value that's final or inline */
+ final val FinalOrInline = Final | Inline
+
+ /** If symbol of a type alias has these flags, prefer the alias */
+ final val AliasPreferred = TypeParam | BaseTypeArg | ExpandedName
+
+ /** A covariant type parameter instance */
+ final val LocalCovariant = allOf(Local, Covariant)
+
+ /** A contravariant type parameter instance */
+ final val LocalContravariant = allOf(Local, Contravariant)
+
+ /** Has defined or inherited default parameters */
+ final val HasDefaultParams = DefaultParameterized | InheritedDefaultParams
+
+ /** Is valid forever */
+ final val ValidForever = Package | Permanent | Scala2ExistentialCommon
+
+ /** Is a default parameter in Scala 2 */
+ final val DefaultParameter = allOf(Param, DefaultParameterized)
+
+ /** A trait that does not need to be initialized */
+ final val NoInitsTrait = allOf(Trait, NoInits)
+
+ /** A Java interface, potentially with default methods */
+ final val JavaTrait = allOf(JavaDefined, Trait, NoInits)
+
+ /** A Java interface */ // TODO when unpickling, reconstitute from context
+ final val JavaInterface = allOf(JavaDefined, Trait)
+
+ /** A Java companion object */
+ final val JavaModule = allOf(JavaDefined, Module)
+
+ /** A Java-defined symbol that is `protected` */
+ final val JavaProtected = allOf(JavaDefined, Protected)
+
+ /** Labeled private[this] */
+ final val PrivateLocal = allOf(Private, Local)
+
+ /** A private[this] parameter accessor */
+ final val PrivateLocalParamAccessor = allOf(Private, Local, ParamAccessor)
+
+ /** A parameter forwarder */
+ final val ParamForwarder = allOf(Method, Stable, ParamAccessor)
+
+ /** A private[this] parameter */
+ final val PrivateLocalParam = allOf(Private, Local, Param)
+
+ /** A private parameter accessor */
+ final val PrivateParamAccessor = allOf(Private, ParamAccessor)
+
+ /** A type parameter introduced with [type ... ] */
+ final val NamedTypeParam = allOf(TypeParam, ParamAccessor)
+
+ /** A local parameter */
+ final val ParamAndLocal = allOf(Param, Local)
+
+ /** Labeled protected[this] */
+ final val ProtectedLocal = allOf(Protected, Local)
+
+ /** Java symbol which is `protected` and `static` */
+ final val StaticProtected = allOf(JavaDefined, Protected, JavaStatic)
+
+ final val AbstractFinal = allOf(Abstract, Final)
+ final val AbstractSealed = allOf(Abstract, Sealed)
+ final val SyntheticArtifact = allOf(Synthetic, Artifact)
+ final val SyntheticModule = allOf(Synthetic, Module)
+ final val SyntheticTermParam = allOf(Synthetic, TermParam)
+ final val SyntheticTypeParam = allOf(Synthetic, TypeParam)
+ final val SyntheticCase = allOf(Synthetic, Case)
+ final val AbstractAndOverride = allOf(Abstract, Override)
+ final val Scala2Trait = allOf(Scala2x, Trait)
+
+ /** A conjunction may be used wherever a plain flag set is expected. */
+ implicit def conjToFlagSet(conj: FlagConjunction): FlagSet =
+ FlagSet(conj.bits)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala
new file mode 100644
index 000000000..e4510c53e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala
@@ -0,0 +1,103 @@
+package dotty.tools.dotc
+package core
+
+import Types._
+import scala.util.hashing.{ MurmurHash3 => hashing }
+
+object Hashable {
+
+ // Sentinel hash values. `avoidSpecialHashes` in trait Hashable remaps computed
+ // hashes that would collide with these sentinels to their *Alt counterparts.
+
+ /** A hash value indicating that the underlying type is not
+ * cached in uniques.
+ */
+ final val NotCached = 0
+
+ /** An alternative value returned from `hash` if the
+ * computed hashCode would be `NotCached`.
+ */
+ private[core] final val NotCachedAlt = Int.MinValue
+
+ /** A value that indicates that the hash code is unknown
+ */
+ private[core] final val HashUnknown = 1234
+
+ /** An alternative value if computeHash would otherwise yield HashUnknown
+ */
+ private[core] final val HashUnknownAlt = 4321
+}
+
+/** Mixin providing hash computation over `Type` elements, based on MurmurHash3.
+ * All `finishHash` variants propagate `NotCached`: if any element's hash is
+ * `NotCached`, the combined result is `NotCached` as well.
+ */
+trait Hashable {
+ import Hashable._
+
+ /** Seed for the hash; by default distinguishes by the concrete class. */
+ protected def hashSeed: Int = getClass.hashCode
+
+ protected final def finishHash(hashCode: Int, arity: Int): Int =
+ avoidSpecialHashes(hashing.finalizeHash(hashCode, arity))
+
+ /** An identity-based hash, remapped away from the sentinel values. */
+ final def identityHash = avoidSpecialHashes(System.identityHashCode(this))
+
+ protected def finishHash(seed: Int, arity: Int, tp: Type): Int = {
+ val elemHash = tp.hash
+ if (elemHash == NotCached) return NotCached
+ finishHash(hashing.mix(seed, elemHash), arity + 1)
+ }
+
+ protected def finishHash(seed: Int, arity: Int, tp1: Type, tp2: Type): Int = {
+ val elemHash = tp1.hash
+ if (elemHash == NotCached) return NotCached
+ finishHash(hashing.mix(seed, elemHash), arity + 1, tp2)
+ }
+
+ protected def finishHash(seed: Int, arity: Int, tps: List[Type]): Int = {
+ var h = seed
+ var xs = tps
+ var len = arity
+ while (xs.nonEmpty) {
+ val elemHash = xs.head.hash
+ if (elemHash == NotCached) return NotCached
+ h = hashing.mix(h, elemHash)
+ xs = xs.tail
+ len += 1
+ }
+ finishHash(h, len)
+ }
+
+ protected def finishHash(seed: Int, arity: Int, tp: Type, tps: List[Type]): Int = {
+ val elemHash = tp.hash
+ if (elemHash == NotCached) return NotCached
+ finishHash(hashing.mix(seed, elemHash), arity + 1, tps)
+ }
+
+ // doHash overloads: combine the seed with the given elements in order.
+
+ protected final def doHash(x: Any): Int =
+ finishHash(hashing.mix(hashSeed, x.hashCode), 1)
+
+ protected final def doHash(tp: Type): Int =
+ finishHash(hashSeed, 0, tp)
+
+ protected final def doHash(x1: Any, tp2: Type): Int =
+ finishHash(hashing.mix(hashSeed, x1.hashCode), 1, tp2)
+
+ protected final def doHash(tp1: Type, tp2: Type): Int =
+ finishHash(hashSeed, 0, tp1, tp2)
+
+ protected final def doHash(x1: Any, tp2: Type, tp3: Type): Int =
+ finishHash(hashing.mix(hashSeed, x1.hashCode), 1, tp2, tp3)
+
+ protected final def doHash(tp1: Type, tps2: List[Type]): Int =
+ finishHash(hashSeed, 0, tp1, tps2)
+
+ protected final def doHash(x1: Any, tp2: Type, tps3: List[Type]): Int =
+ finishHash(hashing.mix(hashSeed, x1.hashCode), 1, tp2, tps3)
+
+
+ protected final def doHash(x1: Int, x2: Int): Int =
+ finishHash(hashing.mix(hashing.mix(hashSeed, x1), x2), 1)
+
+ /** Shift a hash by `delta`, propagating `NotCached` and avoiding sentinels. */
+ protected final def addDelta(elemHash: Int, delta: Int) =
+ if (elemHash == NotCached) NotCached
+ else avoidSpecialHashes(elemHash + delta)
+
+ // Remap results that would collide with the sentinel values NotCached/HashUnknown.
+ private def avoidSpecialHashes(h: Int) =
+ if (h == NotCached) NotCachedAlt
+ else if (h == HashUnknown) HashUnknownAlt
+ else h
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala
new file mode 100644
index 000000000..406a84af6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Mode.scala
@@ -0,0 +1,89 @@
+package dotty.tools.dotc.core
+
+/** A collection of mode bits that are part of a context */
+case class Mode(val bits: Int) extends AnyVal {
+ import Mode._
+ def | (that: Mode) = Mode(bits | that.bits)
+ def & (that: Mode) = Mode(bits & that.bits)
+ def &~ (that: Mode) = Mode(bits & ~that.bits)
+ def is (that: Mode) = (bits & that.bits) == that.bits
+
+ def isExpr = (this & PatternOrType) == None
+
+ override def toString =
+ (0 until 31).filter(i => (bits & (1 << i)) != 0).map(modeName).mkString("Mode(", ",", ")")
+}
+
+object Mode {
+ val None = Mode(0)
+
+ private val modeName = new Array[String](32)
+
+ def newMode(bit: Int, name: String): Mode = {
+ modeName(bit) = name
+ Mode(1 << bit)
+ }
+
+ val Pattern = newMode(0, "Pattern")
+ val Type = newMode(1, "Type")
+
+ val ImplicitsEnabled = newMode(2, "ImplicitsEnabled")
+ val InferringReturnType = newMode(3, "InferringReturnType")
+
+ /** This mode bit is set if we collect information without reference to a valid
+ * context with typerstate and constraint. This is typically done when we
+ * cache the eligibility of implicits. Caching needs to be done across different constraints.
+ * Therefore, if TypevarsMissContext is set, subtyping becomes looser, and assumes
+ * that PolyParams can be sub- and supertypes of anything. See TypeComparer.
+ */
+ val TypevarsMissContext = newMode(4, "TypevarsMissContext")
+ val CheckCyclic = newMode(5, "CheckCyclic")
+
+ val InSuperCall = newMode(6, "InSuperCall")
+
+ /** Allow GADTFlexType labelled types to have their bounds adjusted */
+ val GADTflexible = newMode(8, "GADTflexible")
+
+ /** Allow dependent functions. This is currently necessary for unpickling, because
+ * some dependent functions are passed through from the front end(s?), even though they
+ * are technically speaking illegal.
+ */
+ val AllowDependentFunctions = newMode(9, "AllowDependentFunctions")
+
+ /** We are currently printing something: avoid to produce more logs about
+ * the printing
+ */
+ val Printing = newMode(10, "Printing")
+
+ /** We are currently typechecking an ident to determine whether some implicit
+ * is shadowed - don't do any other shadowing tests.
+ */
+ val ImplicitShadowing = newMode(11, "ImplicitShadowing")
+
+ /** We are currently in a `viewExists` check. In that case, ambiguous
+ * implicits checks are disabled and we succeed with the first implicit
+ * found.
+ */
+ val ImplicitExploration = newMode(12, "ImplicitExploration")
+
+ /** We are currently unpickling Scala2 info */
+ val Scala2Unpickling = newMode(13, "Scala2Unpickling")
+
+ /** Use Scala2 scheme for overloading and implicit resolution */
+ val OldOverloadingResolution = newMode(14, "OldOverloadingResolution")
+
+ /** Allow hk applications of type lambdas to wildcard arguments;
+ * used for checking that such applications do not normally arise
+ */
+ val AllowLambdaWildcardApply = newMode(15, "AllowHKApplyToWildcards")
+
+ /** Read original positions when unpickling from TASTY */
+ val ReadPositions = newMode(16, "ReadPositions")
+
+ val PatternOrType = Pattern | Type
+
+ /** We are elaborating the fully qualified name of a package clause.
+ * In this case, identifiers should never be imported.
+ */
+ val InPackageClauseName = newMode(17, "InPackageClauseName")
+}
diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala
new file mode 100644
index 000000000..4c7f5b0a9
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala
@@ -0,0 +1,432 @@
+package dotty.tools.dotc
+package core
+
+import java.security.MessageDigest
+import scala.annotation.switch
+import scala.io.Codec
+import Names._, StdNames._, Contexts._, Symbols._, Flags._
+import Decorators.StringDecorator
+import util.{Chars, NameTransformer}
+import Chars.isOperatorPart
+
+object NameOps {
+
+ final object compactify {
+ lazy val md5 = MessageDigest.getInstance("MD5")
+
+ /** COMPACTIFY
+ *
+ * The hashed name has the form (prefix + marker + md5 + marker + suffix), where
+ * - prefix/suffix.length = MaxNameLength / 4
+ * - md5.length = 32
+ *
+ * We obtain the formula:
+ *
+ * FileNameLength = 2 * (MaxNameLength / 4) + 2 * marker.length + 32 + 6
+ *
+ * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
+ */
+ def apply(s: String)(implicit ctx: Context): String = {
+ val marker = "$$$$"
+ val limit: Int = ctx.settings.maxClassfileName.value
+ val MaxNameLength = (limit - 6) min 2 * (limit - 6 - 2 * marker.length - 32)
+
+ def toMD5(s: String, edge: Int): String = {
+ val prefix = s take edge
+ val suffix = s takeRight edge
+
+ val cs = s.toArray
+ val bytes = Codec toUTF8 cs
+ md5 update bytes
+ val md5chars = (md5.digest() map (b => (b & 0xFF).toHexString)).mkString
+
+ prefix + marker + md5chars + marker + suffix
+ }
+
+ if (s.length <= MaxNameLength) s else toMD5(s, MaxNameLength / 4)
+ }
+ }
+
+ class PrefixNameExtractor(pre: TermName) {
+ def apply(name: TermName): TermName = pre ++ name
+ def unapply(name: TermName): Option[TermName] =
+ if (name startsWith pre) Some(name.drop(pre.length).asTermName) else None
+ }
+
+ object SuperAccessorName extends PrefixNameExtractor(nme.SUPER_PREFIX)
+ object InitializerName extends PrefixNameExtractor(nme.INITIALIZER_PREFIX)
+
+ implicit class NameDecorator[N <: Name](val name: N) extends AnyVal {
+ import nme._
+
+ def likeTyped(n: PreName): N =
+ (if (name.isTermName) n.toTermName else n.toTypeName).asInstanceOf[N]
+
+ def isConstructorName = name == CONSTRUCTOR || name == TRAIT_CONSTRUCTOR
+ def isStaticConstructorName = name == STATIC_CONSTRUCTOR
+ def isExceptionResultName = name startsWith EXCEPTION_RESULT_PREFIX
+ def isImplClassName = name endsWith IMPL_CLASS_SUFFIX
+ def isLocalDummyName = name startsWith LOCALDUMMY_PREFIX
+ def isLoopHeaderLabel = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
+ def isProtectedAccessorName = name startsWith PROTECTED_PREFIX
+ def isReplWrapperName = name containsSlice INTERPRETER_IMPORT_WRAPPER
+ def isTraitSetterName = name containsSlice TRAIT_SETTER_SEPARATOR
+ def isSetterName = name endsWith SETTER_SUFFIX
+ def isSingletonName = name endsWith SINGLETON_SUFFIX
+ def isModuleClassName = name endsWith MODULE_SUFFIX
+ def isAvoidClashName = name endsWith AVOID_CLASH_SUFFIX
+ def isImportName = name startsWith IMPORT
+ def isFieldName = name endsWith LOCAL_SUFFIX
+ def isShadowedName = name.length > 0 && name.head == '(' && name.startsWith(nme.SHADOWED)
+ def isDefaultGetterName = name.isTermName && name.asTermName.defaultGetterIndex >= 0
+ def isScala2LocalSuffix = name.endsWith(" ")
+ def isModuleVarName(name: Name): Boolean =
+ name.stripAnonNumberSuffix endsWith MODULE_VAR_SUFFIX
+ def isSelectorName = name.startsWith(" ") && name.tail.forall(_.isDigit)
+ def isLazyLocal = name.endsWith(nme.LAZY_LOCAL)
+ def isOuterSelect = name.endsWith(nme.OUTER_SELECT)
+ def isInlineAccessor = name.startsWith(nme.INLINE_ACCESSOR_PREFIX)
+
+ /** Is name a variable name? */
+ def isVariableName: Boolean = name.length > 0 && {
+ val first = name.head
+ (((first.isLower && first.isLetter) || first == '_')
+ && (name != false_)
+ && (name != true_)
+ && (name != null_))
+ }
+
+ def isOpAssignmentName: Boolean = name match {
+ case raw.NE | raw.LE | raw.GE | EMPTY =>
+ false
+ case _ =>
+ name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head)
+ }
+
+ /** If the name ends with $nn where nn are
+ * all digits, strip the $ and the digits.
+ * Otherwise return the argument.
+ */
+ def stripAnonNumberSuffix: Name = {
+ var pos = name.length
+ while (pos > 0 && name(pos - 1).isDigit)
+ pos -= 1
+
+ if (pos > 0 && pos < name.length && name(pos - 1) == '$')
+ name take (pos - 1)
+ else
+ name
+ }
+
+ /** Convert this module name to corresponding module class name */
+ def moduleClassName: TypeName = (name ++ tpnme.MODULE_SUFFIX).toTypeName
+
+ /** Convert this module class name to corresponding source module name */
+ def sourceModuleName: TermName = stripModuleClassSuffix.toTermName
+
+ /** If name ends in module class suffix, drop it */
+ def stripModuleClassSuffix: Name =
+ if (isModuleClassName) name dropRight MODULE_SUFFIX.length else name
+
+ /** Append a suffix so that this name does not clash with another name in the same scope */
+ def avoidClashName: TermName = (name ++ AVOID_CLASH_SUFFIX).toTermName
+
+ /** If name ends in "avoid clash" suffix, drop it */
+ def stripAvoidClashSuffix: Name =
+ if (isAvoidClashName) name dropRight AVOID_CLASH_SUFFIX.length else name
+
+ /** If flags is a ModuleClass but not a Package, add module class suffix */
+ def adjustIfModuleClass(flags: Flags.FlagSet): N = {
+ if (flags is (ModuleClass, butNot = Package)) name.asTypeName.moduleClassName
+ else stripAvoidClashSuffix
+ }.asInstanceOf[N]
+
+ /** The superaccessor for method with given name */
+ def superName: TermName = (nme.SUPER_PREFIX ++ name).toTermName
+
+ /** The expanded name of `name` relative to given class `base`.
+ */
+ def expandedName(base: Symbol, separator: Name)(implicit ctx: Context): N =
+ expandedName(if (base is Flags.ExpandedName) base.name else base.fullNameSeparated("$"), separator)
+
+ def expandedName(base: Symbol)(implicit ctx: Context): N = expandedName(base, nme.EXPAND_SEPARATOR)
+
+ /** The expanded name of `name` relative to `basename` with given `separator`
+ */
+ def expandedName(prefix: Name, separator: Name = nme.EXPAND_SEPARATOR): N =
+ name.fromName(prefix ++ separator ++ name).asInstanceOf[N]
+
+ def expandedName(prefix: Name): N = expandedName(prefix, nme.EXPAND_SEPARATOR)
+
+ /** Revert the expanded name. Note: This currently gives incorrect results
+ * if the normal name contains `nme.EXPAND_SEPARATOR`, i.e. two consecutive '$'
+ * signs. This can happen for instance if a super accessor is paired with
+ * an encoded name, e.g. super$$plus$eq. See #765.
+ */
+ def unexpandedName: N = {
+ var idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
+
+ // Hack to make super accessors from traits work. They would otherwise fail because of #765
+ // TODO: drop this once we have more robust name handling
+ if (idx > FalseSuperLength && name.slice(idx - FalseSuperLength, idx) == FalseSuper)
+ idx -= FalseSuper.length
+
+ if (idx < 0) name else (name drop (idx + nme.EXPAND_SEPARATOR.length)).asInstanceOf[N]
+ }
+
+ def expandedPrefix: N = {
+ val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
+ assert(idx >= 0)
+ name.take(idx).asInstanceOf[N]
+ }
+
+ def shadowedName: N = likeTyped(nme.SHADOWED ++ name)
+
+ def revertShadowed: N = likeTyped(name.drop(nme.SHADOWED.length))
+
+ def implClassName: N = likeTyped(name ++ tpnme.IMPL_CLASS_SUFFIX)
+
+ def errorName: N = likeTyped(name ++ nme.ERROR)
+
+ def freshened(implicit ctx: Context): N =
+ likeTyped(
+ if (name.isModuleClassName) name.stripModuleClassSuffix.freshened.moduleClassName
+ else likeTyped(ctx.freshName(name ++ NameTransformer.NAME_JOIN_STRING)))
+
+ /** Translate a name into a list of simple TypeNames and TermNames.
+ * In all segments before the last, type/term is determined by whether
+ * the following separator char is '.' or '#'. The last segment
+ * is of the same type as the original name.
+ *
+ * Examples:
+ *
+ * package foo {
+ * object Lorax { object Wog ; class Wog }
+ * class Lorax { object Zax ; class Zax }
+ * }
+ *
+ * f("foo.Lorax".toTermName) == List("foo": Term, "Lorax": Term) // object Lorax
+ * f("foo.Lorax".toTypeName) == List("foo": Term, "Lorax": Type) // class Lorax
+ * f("Lorax.Wog".toTermName) == List("Lorax": Term, "Wog": Term) // object Wog
+ * f("Lorax.Wog".toTypeName) == List("Lorax": Term, "Wog": Type) // class Wog
+ * f("Lorax#Zax".toTermName) == List("Lorax": Type, "Zax": Term) // object Zax
+ * f("Lorax#Zax".toTypeName) == List("Lorax": Type, "Zax": Type) // class Zax
+ *
+ * Note that in actual scala syntax you cannot refer to object Zax without an
+ * instance of Lorax, so Lorax#Zax could only mean the type. One might think
+ * that Lorax#Zax.type would work, but this is not accepted by the parser.
+ * For the purposes of referencing that object, the syntax is allowed.
+ */
+ def segments: List[Name] = {
+ def mkName(name: Name, follow: Char): Name =
+ if (follow == '.') name.toTermName else name.toTypeName
+
+ name.indexWhere(ch => ch == '.' || ch == '#') match {
+ case -1 =>
+ if (name.isEmpty) scala.Nil else name :: scala.Nil
+ case idx =>
+ mkName(name take idx, name(idx)) :: (name drop (idx + 1)).segments
+ }
+ }
+
+ /** The name of the generic runtime operation corresponding to an array operation */
+ def genericArrayOp: TermName = name match {
+ case nme.apply => nme.array_apply
+ case nme.length => nme.array_length
+ case nme.update => nme.array_update
+ case nme.clone_ => nme.array_clone
+ }
+
+ /** The name of the primitive runtime operation corresponding to an array operation */
+ def primitiveArrayOp: TermName = name match {
+ case nme.apply => nme.primitive.arrayApply
+ case nme.length => nme.primitive.arrayLength
+ case nme.update => nme.primitive.arrayUpdate
+ case nme.clone_ => nme.clone_
+ }
+
+ def specializedFor(classTargs: List[Types.Type], classTargsNames: List[Name], methodTargs: List[Types.Type], methodTarsNames: List[Name])(implicit ctx: Context): name.ThisName = {
+
+ def typeToTag(tp: Types.Type): Name = {
+ tp.classSymbol match {
+ case t if t eq defn.IntClass => nme.specializedTypeNames.Int
+ case t if t eq defn.BooleanClass => nme.specializedTypeNames.Boolean
+ case t if t eq defn.ByteClass => nme.specializedTypeNames.Byte
+ case t if t eq defn.LongClass => nme.specializedTypeNames.Long
+ case t if t eq defn.ShortClass => nme.specializedTypeNames.Short
+ case t if t eq defn.FloatClass => nme.specializedTypeNames.Float
+ case t if t eq defn.UnitClass => nme.specializedTypeNames.Void
+ case t if t eq defn.DoubleClass => nme.specializedTypeNames.Double
+ case t if t eq defn.CharClass => nme.specializedTypeNames.Char
+ case _ => nme.specializedTypeNames.Object
+ }
+ }
+
+ val methodTags: Seq[Name] = (methodTargs zip methodTarsNames).sortBy(_._2).map(x => typeToTag(x._1))
+ val classTags: Seq[Name] = (classTargs zip classTargsNames).sortBy(_._2).map(x => typeToTag(x._1))
+
+ name.fromName(name ++ nme.specializedTypeNames.prefix ++
+ methodTags.fold(nme.EMPTY)(_ ++ _) ++ nme.specializedTypeNames.separator ++
+ classTags.fold(nme.EMPTY)(_ ++ _) ++ nme.specializedTypeNames.suffix)
+ }
+
+ /** If name length exceeds allowable limit, replace part of it by hash */
+ def compactified(implicit ctx: Context): TermName = termName(compactify(name.toString))
+ }
+
+ // needed???
+ private val Boxed = Map[TypeName, TypeName](
+ tpnme.Boolean -> jtpnme.BoxedBoolean,
+ tpnme.Byte -> jtpnme.BoxedByte,
+ tpnme.Char -> jtpnme.BoxedCharacter,
+ tpnme.Short -> jtpnme.BoxedShort,
+ tpnme.Int -> jtpnme.BoxedInteger,
+ tpnme.Long -> jtpnme.BoxedLong,
+ tpnme.Float -> jtpnme.BoxedFloat,
+ tpnme.Double -> jtpnme.BoxedDouble)
+
+ implicit class TermNameDecorator(val name: TermName) extends AnyVal {
+ import nme._
+
+ def setterName: TermName =
+ if (name.isFieldName) name.fieldToGetter.setterName
+ else name ++ SETTER_SUFFIX
+
+ def getterName: TermName =
+ if (name.isFieldName) fieldToGetter
+ else setterToGetter
+
+ def fieldName: TermName =
+ if (name.isSetterName) {
+ if (name.isTraitSetterName) {
+ // has form <$-separated-trait-name>$_setter_$ `name`_$eq
+ val start = name.indexOfSlice(TRAIT_SETTER_SEPARATOR) + TRAIT_SETTER_SEPARATOR.length
+ val end = name.indexOfSlice(SETTER_SUFFIX)
+ name.slice(start, end) ++ LOCAL_SUFFIX
+ } else getterName.fieldName
+ }
+ else name ++ LOCAL_SUFFIX
+
+ private def setterToGetter: TermName = {
+ assert(name.endsWith(SETTER_SUFFIX), name + " is referenced as a setter but has wrong name format")
+ name.take(name.length - SETTER_SUFFIX.length).asTermName
+ }
+
+ def fieldToGetter: TermName = {
+ assert(name.isFieldName)
+ name.take(name.length - LOCAL_SUFFIX.length).asTermName
+ }
+
+ /** Nominally, name$default$N, encoded for <init>
+ * @param Post the parameters position.
+ * @note Default getter name suffixes start at 1, so `pos` has to be adjusted by +1
+ */
+ def defaultGetterName(pos: Int): TermName = {
+ val prefix = if (name.isConstructorName) DEFAULT_GETTER_INIT else name
+ prefix ++ DEFAULT_GETTER ++ (pos + 1).toString
+ }
+
+ /** Nominally, name from name$default$N, CONSTRUCTOR for <init> */
+ def defaultGetterToMethod: TermName = {
+ val p = name.indexOfSlice(DEFAULT_GETTER)
+ if (p >= 0) {
+ val q = name.take(p).asTermName
+ // i.e., if (q.decoded == CONSTRUCTOR.toString) CONSTRUCTOR else q
+ if (q == DEFAULT_GETTER_INIT) CONSTRUCTOR else q
+ } else name
+ }
+
+ /** If this is a default getter, its index (starting from 0), else -1 */
+ def defaultGetterIndex: Int = {
+ var i = name.length
+ while (i > 0 && name(i - 1).isDigit) i -= 1
+ if (i > 0 && i < name.length && name.take(i).endsWith(DEFAULT_GETTER))
+ name.drop(i).toString.toInt - 1
+ else
+ -1
+ }
+
+ def stripScala2LocalSuffix: TermName =
+ if (name.isScala2LocalSuffix) name.init.asTermName else name
+
+ /** The name of an accessor for protected symbols. */
+ def protectedAccessorName: TermName =
+ PROTECTED_PREFIX ++ name.unexpandedName
+
+ /** The name of a setter for protected symbols. Used for inherited Java fields. */
+ def protectedSetterName: TermName =
+ PROTECTED_SET_PREFIX ++ name.unexpandedName
+
+ def moduleVarName: TermName =
+ name ++ MODULE_VAR_SUFFIX
+
+ /** The name unary_x for a prefix operator x */
+ def toUnaryName: TermName = name match {
+ case raw.MINUS => UNARY_-
+ case raw.PLUS => UNARY_+
+ case raw.TILDE => UNARY_~
+ case raw.BANG => UNARY_!
+ case _ => name
+ }
+
+ /** The name of a method which stands in for a primitive operation
+ * during structural type dispatch.
+ */
+ def primitiveInfixMethodName: TermName = name match {
+ case OR => takeOr
+ case XOR => takeXor
+ case AND => takeAnd
+ case EQ => testEqual
+ case NE => testNotEqual
+ case ADD => add
+ case SUB => subtract
+ case MUL => multiply
+ case DIV => divide
+ case MOD => takeModulo
+ case LSL => shiftSignedLeft
+ case LSR => shiftLogicalRight
+ case ASR => shiftSignedRight
+ case LT => testLessThan
+ case LE => testLessOrEqualThan
+ case GE => testGreaterOrEqualThan
+ case GT => testGreaterThan
+ case ZOR => takeConditionalOr
+ case ZAND => takeConditionalAnd
+ case _ => NO_NAME
+ }
+
+ /** Postfix/prefix, really.
+ */
+ def primitivePostfixMethodName: TermName = name match {
+ case UNARY_! => takeNot
+ case UNARY_+ => positive
+ case UNARY_- => negate
+ case UNARY_~ => complement
+ case `toByte` => toByte
+ case `toShort` => toShort
+ case `toChar` => toCharacter
+ case `toInt` => toInteger
+ case `toLong` => toLong
+ case `toFloat` => toFloat
+ case `toDouble` => toDouble
+ case _ => NO_NAME
+ }
+
+ def primitiveMethodName: TermName =
+ primitiveInfixMethodName match {
+ case NO_NAME => primitivePostfixMethodName
+ case name => name
+ }
+
+ def lazyLocalName = name ++ nme.LAZY_LOCAL
+ def nonLazyName = {
+ assert(name.isLazyLocal)
+ name.dropRight(nme.LAZY_LOCAL.length)
+ }
+
+ def inlineAccessorName = nme.INLINE_ACCESSOR_PREFIX ++ name ++ "$"
+ }
+
+ private final val FalseSuper = "$$super".toTermName
+ private val FalseSuperLength = FalseSuper.length
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala
new file mode 100644
index 000000000..11f0b55a8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Names.scala
@@ -0,0 +1,372 @@
+package dotty.tools
+package dotc
+package core
+
+import scala.io.Codec
+import util.NameTransformer
+import printing.{Showable, Texts, Printer}
+import Texts.Text
+import Decorators._
+import Contexts.Context
+import collection.IndexedSeqOptimized
+import collection.generic.CanBuildFrom
+import collection.mutable.{ Builder, StringBuilder }
+import collection.immutable.WrappedString
+import collection.generic.CanBuildFrom
+import util.DotClass
+//import annotation.volatile
+
+object Names {
+
+ /** A common class for things that can be turned into names.
+ * Instances are both names and strings, the latter via a decorator.
+ */
+ trait PreName extends Any with Showable {
+ def toTypeName: TypeName
+ def toTermName: TermName
+ }
+
+ implicit def eqName: Eq[Name, Name] = Eq
+
+ /** A name is essentially a string, with three differences
+ * 1. Names belong in one of two name spaces: they are type names or term names.
+ * Term names have a sub-category of "local" field names.
+ * The same string can correspond to a name in each of the three namespaces.
+ * 2. Names are hash-consed. Two names
+ * representing the same string in the same universe are always reference identical.
+ * 3. Names are intended to be encoded strings. @see dotc.util.NameTransformer.
+ * The encoding will be applied when converting a string to a name.
+ */
+ abstract class Name extends DotClass
+ with PreName
+ with collection.immutable.Seq[Char]
+ with IndexedSeqOptimized[Char, Name] {
+
+ /** A type for names of the same kind as this name */
+ type ThisName <: Name
+
+ /** The start index in the character array */
+ val start: Int
+
+ /** The length of the name */
+ override val length: Int
+
+ /** Is this name a type name? */
+ def isTypeName: Boolean
+
+ /** Is this name a term name? */
+ def isTermName: Boolean
+
+ /** This name converted to a type name */
+ def toTypeName: TypeName
+
+ /** This name converted to a term name */
+ def toTermName: TermName
+
+ /** This name downcasted to a type name */
+ def asTypeName: TypeName
+
+ /** This name downcasted to a term name */
+ def asTermName: TermName
+
+ /** Create a new name of same kind as this one, in the given
+ * basis, with `len` characters taken from `cs` starting at `offset`.
+ */
+ def fromChars(cs: Array[Char], offset: Int, len: Int): ThisName
+
+ /** Create new name of same kind as this name and with same
+ * characters as given `name`.
+ */
+ def fromName(name: Name): ThisName = fromChars(chrs, name.start, name.length)
+
+ /** Create new name of same kind as this name with characters from
+ * the given string
+ */
+ def fromString(str: String): ThisName = {
+ val cs = str.toCharArray
+ fromChars(cs, 0, cs.length)
+ }
+
+ override def toString =
+ if (length == 0) "" else new String(chrs, start, length)
+
+ def toText(printer: Printer): Text = printer.toText(this)
+
+ /** Write the UTF8 representation of this name to the given byte array.
+ * Start copying to index `offset`. Return index of next free byte in array.
+ * Array must have enough remaining space for all bytes
+ * (i.e. maximally 3*length bytes).
+ */
+ final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
+ val bytes = Codec.toUTF8(chrs, start, length)
+ scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
+ offset + bytes.length
+ }
+
+ /** Replace \$op_name's by corresponding operator symbols. */
+ def decode: Name =
+ if (contains('$')) fromString(NameTransformer.decode(toString))
+ else this
+
+ /** Replace operator symbols by corresponding \$op_name's. */
+ def encode: Name =
+ if (dontEncode(toTermName)) this else NameTransformer.encode(this)
+
+ /** A more efficient version of concatenation */
+ def ++ (other: Name): ThisName = ++ (other.toString)
+
+ def ++ (other: String): ThisName = {
+ val s = toString + other
+ fromChars(s.toCharArray, 0, s.length)
+ }
+
+ def replace(from: Char, to: Char): ThisName = {
+ val cs = new Array[Char](length)
+ Array.copy(chrs, start, cs, 0, length)
+ for (i <- 0 until length) {
+ if (cs(i) == from) cs(i) = to
+ }
+ fromChars(cs, 0, length)
+ }
+
+ def contains(ch: Char): Boolean = {
+ var i = 0
+ while (i < length && chrs(start + i) != ch) i += 1
+ i < length
+ }
+
+ def firstChar = chrs(start)
+
+ // ----- Collections integration -------------------------------------
+
+ override protected[this] def thisCollection: WrappedString = new WrappedString(repr.toString)
+ override protected[this] def toCollection(repr: Name): WrappedString = new WrappedString(repr.toString)
+
+ override protected[this] def newBuilder: Builder[Char, Name] = unsupported("newBuilder")
+
+ override def apply(index: Int): Char = chrs(start + index)
+
+ override def slice(from: Int, until: Int): ThisName =
+ fromChars(chrs, start + from, until - from)
+
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ override def seq = toCollection(this)
+ }
+
+ class TermName(val start: Int, val length: Int, @sharable private[Names] var next: TermName) extends Name {
+ // `next` is @sharable because it is only modified in the synchronized block of termName.
+ type ThisName = TermName
+ def isTypeName = false
+ def isTermName = true
+
+ @sharable // because it is only modified in the synchronized block of toTypeName.
+ @volatile private[this] var _typeName: TypeName = null
+
+ def toTypeName: TypeName = {
+ if (_typeName == null)
+ synchronized {
+ if (_typeName == null)
+ _typeName = new TypeName(start, length, this)
+ }
+ _typeName
+ }
+ def toTermName = this
+ def asTypeName = throw new ClassCastException(this + " is not a type name")
+ def asTermName = this
+
+ override def hashCode: Int = start
+
+ override protected[this] def newBuilder: Builder[Char, Name] = termNameBuilder
+
+ def fromChars(cs: Array[Char], offset: Int, len: Int): TermName = termName(cs, offset, len)
+ }
+
+ class TypeName(val start: Int, val length: Int, val toTermName: TermName) extends Name {
+ type ThisName = TypeName
+ def isTypeName = true
+ def isTermName = false
+ def toTypeName = this
+ def asTypeName = this
+ def asTermName = throw new ClassCastException(this + " is not a term name")
+
+ override def hashCode: Int = -start
+
+ override protected[this] def newBuilder: Builder[Char, Name] =
+ termNameBuilder.mapResult(_.toTypeName)
+
+ def fromChars(cs: Array[Char], offset: Int, len: Int): TypeName = typeName(cs, offset, len)
+ }
+
+ // Nametable
+
+ private final val InitialHashSize = 0x8000
+ private final val InitialNameSize = 0x20000
+ private final val fillFactor = 0.7
+
+ /** Memory to store all names sequentially. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private[dotty] var chrs: Array[Char] = new Array[Char](InitialNameSize)
+
+ /** The number of characters filled. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private var nc = 0
+
+ /** Hashtable for finding term names quickly. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private var table = new Array[TermName](InitialHashSize)
+
+ /** The number of defined names. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private var size = 1
+
+ /** The hash of a name made from the characters cs[offset..offset+len-1]. */
+ private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
+ if (len > 0)
+ (len * (41 * 41 * 41) +
+ cs(offset) * (41 * 41) +
+ cs(offset + len - 1) * 41 +
+ cs(offset + (len >> 1)))
+ else 0
+
+ /** Is (the ASCII representation of) name at given index equal to
+ * cs[offset..offset+len-1]?
+ */
+ private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
+ var i = 0
+ while ((i < len) && (chrs(index + i) == cs(offset + i)))
+ i += 1
+ i == len
+ }
+
+ /** Create a term name from the characters in cs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def termName(cs: Array[Char], offset: Int, len: Int): TermName = synchronized {
+ util.Stats.record("termName")
+ val h = hashValue(cs, offset, len) & (table.size - 1)
+
+ /** Make sure the capacity of the character array is at least `n` */
+ def ensureCapacity(n: Int) =
+ if (n > chrs.length) {
+ val newchrs = new Array[Char](chrs.length * 2)
+ chrs.copyToArray(newchrs)
+ chrs = newchrs
+ }
+
+ /** Enter characters into chrs array. */
+ def enterChars(): Unit = {
+ ensureCapacity(nc + len)
+ var i = 0
+ while (i < len) {
+ chrs(nc + i) = cs(offset + i)
+ i += 1
+ }
+ nc += len
+ }
+
+ /** Rehash chain of names */
+ def rehash(name: TermName): Unit =
+ if (name != null) {
+ val oldNext = name.next
+ val h = hashValue(chrs, name.start, name.length) & (table.size - 1)
+ name.next = table(h)
+ table(h) = name
+ rehash(oldNext)
+ }
+
+ /** Make sure the hash table is large enough for the given load factor */
+ def incTableSize() = {
+ size += 1
+ if (size.toDouble / table.size > fillFactor) {
+ val oldTable = table
+ table = new Array[TermName](table.size * 2)
+ for (i <- 0 until oldTable.size) rehash(oldTable(i))
+ }
+ }
+
+ val next = table(h)
+ var name = next
+ while (name ne null) {
+ if (name.length == len && equals(name.start, cs, offset, len))
+ return name
+ name = name.next
+ }
+ name = new TermName(nc, len, next)
+ enterChars()
+ table(h) = name
+ incTableSize()
+ name
+ }
+
+ /** Create a type name from the characters in cs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def typeName(cs: Array[Char], offset: Int, len: Int): TypeName =
+ termName(cs, offset, len).toTypeName
+
+ /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def termName(bs: Array[Byte], offset: Int, len: Int): TermName = {
+ val chars = Codec.fromUTF8(bs, offset, len)
+ termName(chars, 0, chars.length)
+ }
+
+ /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def typeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
+ termName(bs, offset, len).toTypeName
+
+ /** Create a term name from a string, without encoding operators */
+ def termName(s: String): TermName = termName(s.toCharArray, 0, s.length)
+
+ /** Create a type name from a string, without encoding operators */
+ def typeName(s: String): TypeName = typeName(s.toCharArray, 0, s.length)
+
+ /** The term name represented by the empty string */
+ val EmptyTermName = new TermName(-1, 0, null)
+
+ table(0) = EmptyTermName
+
+ /** The type name represented by the empty string */
+ val EmptyTypeName = EmptyTermName.toTypeName
+
+ // can't move CONSTRUCTOR/EMPTY_PACKAGE to `nme` because of bootstrap failures in `encode`.
+ val CONSTRUCTOR = termName("<init>")
+ val STATIC_CONSTRUCTOR = termName("<clinit>")
+ val EMPTY_PACKAGE = termName("<empty>")
+
+ val dontEncode = Set(CONSTRUCTOR, EMPTY_PACKAGE)
+
+ def termNameBuilder: Builder[Char, TermName] =
+ StringBuilder.newBuilder.mapResult(termName)
+
+ implicit val nameCanBuildFrom: CanBuildFrom[Name, Char, Name] = new CanBuildFrom[Name, Char, Name] {
+ def apply(from: Name): Builder[Char, Name] =
+ StringBuilder.newBuilder.mapResult(s => from.fromChars(s.toCharArray, 0, s.length))
+ def apply(): Builder[Char, Name] = termNameBuilder
+ }
+
+ implicit val NameOrdering: Ordering[Name] = new Ordering[Name] {
+ def compare(x: Name, y: Name): Int = {
+ if (x.isTermName && y.isTypeName) 1
+ else if (x.isTypeName && y.isTermName) -1
+ else if (x eq y) 0
+ else {
+ val until = x.length min y.length
+ var i = 0
+
+ while (i < until && x(i) == y(i)) i = i + 1
+
+ if (i < until) {
+ if (x(i) < y(i)) -1
+ else /*(x(i) > y(i))*/ 1
+ } else {
+ x.length - y.length
+ }
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
new file mode 100644
index 000000000..72c7a8e51
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -0,0 +1,636 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._, Decorators._
+import util.SimpleMap
+import collection.mutable
+import printing.{Printer, Showable}
+import printing.Texts._
+import config.Config
+import collection.immutable.BitSet
+import reflect.ClassTag
+import annotation.tailrec
+
+object OrderingConstraint {
+
+ /** A map from polytypes to per-parameter arrays of `T`. */
+ type ArrayValuedMap[T] = SimpleMap[PolyType, Array[T]]
+
+ /** The type of `OrderingConstraint#boundsMap` */
+ type ParamBounds = ArrayValuedMap[Type]
+
+ /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
+ type ParamOrdering = ArrayValuedMap[List[PolyParam]]
+
+ /** A new constraint with given maps */
+ private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
+ val result = new OrderingConstraint(boundsMap, lowerMap, upperMap)
+ if (Config.checkConstraintsNonCyclic) result.checkNonCyclic()
+ ctx.runInfo.recordConstraintSize(result, result.boundsMap.size)
+ result
+ }
+
+ /** A lens for updating a single entry array in one of the three constraint maps */
+ abstract class ConstraintLens[T <: AnyRef: ClassTag] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[T]
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[T])(implicit ctx: Context): OrderingConstraint
+ def initial: T
+
+ /** The entry at `idx` for `poly` in `c`, or `initial` if `poly` has no entries. */
+ def apply(c: OrderingConstraint, poly: PolyType, idx: Int) = {
+ val es = entries(c, poly)
+ if (es == null) initial else es(idx)
+ }
+
+ /** The `current` constraint but with the entry for `param` updated to `entry`.
+ * `current` is used linearly. If it is different from `prev` it is
+ * known to be dead after the call. Hence it is OK to update destructively
+ * parts of `current` which are not shared by `prev`.
+ */
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
+ poly: PolyType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = {
+ var es = entries(current, poly)
+ if (es != null && (es(idx) eq entry)) current
+ else {
+ val result =
+ if (es == null) {
+ // No entries yet for `poly`: allocate a fresh array of `initial` values.
+ es = Array.fill(poly.paramNames.length)(initial)
+ updateEntries(current, poly, es)
+ }
+ else if (es ne entries(prev, poly))
+ current // can re-use existing entries array.
+ else {
+ // Array is shared with `prev`: copy before the destructive write below.
+ es = es.clone
+ updateEntries(current, poly, es)
+ }
+ es(idx) = entry
+ result
+ }
+ }
+
+ /** As the positional `update`, addressing the entry via `param`. */
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
+ param: PolyParam, entry: T)(implicit ctx: Context): OrderingConstraint =
+ update(prev, current, param.binder, param.paramNum, entry)
+
+ /** Update the entry at (`poly`, `idx`) by applying `f` to its current value. */
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ update(prev, current, poly, idx, f(apply(current, poly, idx)))
+
+ /** Update the entry for `param` by applying `f` to its current value. */
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ param: PolyParam, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ map(prev, current, param.binder, param.paramNum, f)
+ }
+
+ /** Lens on `boundsMap`: the bounds/instance entry of each parameter. */
+ val boundsLens = new ConstraintLens[Type] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
+ c.boundsMap(poly)
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
+ newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap)
+ def initial = NoType
+ }
+
+ /** Lens on `lowerMap`: the parameters known to be smaller than each parameter. */
+ val lowerLens = new ConstraintLens[List[PolyParam]] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ c.lowerMap(poly)
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap)
+ def initial = Nil
+ }
+
+ /** Lens on `upperMap`: the parameters known to be greater than each parameter. */
+ val upperLens = new ConstraintLens[List[PolyParam]] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ c.upperMap(poly)
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries))
+ def initial = Nil
+ }
+}
+
+import OrderingConstraint._
+
+/** Constraint over undetermined type parameters that keeps separate maps to
+ * reflect parameter orderings.
+ * @param boundsMap a map from PolyType to arrays.
+ * Each array contains twice the number of entries as there are type parameters
+ * in the PolyType. The first half of the array contains the type bounds that constrain the
+ * polytype's type parameters. The second half might contain type variables that
+ * track the corresponding parameters, or is left empty (filled with nulls).
+ * An instantiated type parameter is represented by having its instance type in
+ * the corresponding array entry. The dual use of arrays for poly params
+ * and typevars is to save space and hopefully gain some speed.
+ *
+ * @param lowerMap a map from PolyTypes to arrays. Each array entry corresponds
+ * to a parameter P of the polytype; it contains all constrained parameters
+ * Q that are known to be smaller than P, i.e. Q <: P.
+ * @param upperMap a map from PolyTypes to arrays. Each array entry corresponds
+ * to a parameter P of the polytype; it contains all constrained parameters
+ * Q that are known to be greater than P, i.e. P <: Q.
+ */
+class OrderingConstraint(private val boundsMap: ParamBounds,
+ private val lowerMap : ParamOrdering,
+ private val upperMap : ParamOrdering) extends Constraint {
+
+ type This = OrderingConstraint
+
+// ----------- Basic indices --------------------------------------------------
+
+ /** The number of type parameters in the given entry array */
+ private def paramCount(entries: Array[Type]) = entries.length >> 1
+
+ /** The type variable corresponding to parameter numbered `n`, null if none was created */
+ private def typeVar(entries: Array[Type], n: Int): Type =
+ entries(paramCount(entries) + n)
+
+ /** The `boundsMap` entry corresponding to `param` */
+ def entry(param: PolyParam): Type = {
+ val entries = boundsMap(param.binder)
+ if (entries == null) NoType
+ else entries(param.paramNum)
+ }
+
+// ----------- Contains tests --------------------------------------------------
+
+ /** Is polytype `pt` part of this constraint's domain? */
+ def contains(pt: PolyType): Boolean = boundsMap(pt) != null
+
+ /** Is `param` a constrained (i.e. not yet instantiated) parameter? */
+ def contains(param: PolyParam): Boolean = {
+ val entries = boundsMap(param.binder)
+ entries != null && isBounds(entries(param.paramNum))
+ }
+
+ /** Is `tvar` the type variable recorded for its (still constrained) origin parameter? */
+ def contains(tvar: TypeVar): Boolean = {
+ val origin = tvar.origin
+ val entries = boundsMap(origin.binder)
+ val pnum = origin.paramNum
+ entries != null && isBounds(entries(pnum)) && (typeVar(entries, pnum) eq tvar)
+ }
+
+ /** An entry is still a bounds entry (as opposed to an instance type). */
+ private def isBounds(tp: Type) = tp.isInstanceOf[TypeBounds]
+
+// ---------- Dependency handling ----------------------------------------------
+
+ /** All parameters recorded as smaller than `param` (`p <: param`). */
+ def lower(param: PolyParam): List[PolyParam] = lowerLens(this, param.binder, param.paramNum)
+ /** All parameters recorded as greater than `param` (`param <: p`). */
+ def upper(param: PolyParam): List[PolyParam] = upperLens(this, param.binder, param.paramNum)
+
+ /** The lower parameters of `param` with any parameter dominated by another
+ * lower parameter removed (keep only the maximal lower bounds).
+ */
+ def minLower(param: PolyParam): List[PolyParam] = {
+ val all = lower(param)
+ all.filterNot(p => all.exists(isLess(p, _)))
+ }
+
+ /** The upper parameters of `param` with any parameter dominating another
+ * upper parameter removed (keep only the minimal upper bounds).
+ */
+ def minUpper(param: PolyParam): List[PolyParam] = {
+ val all = upper(param)
+ all.filterNot(p => all.exists(isLess(_, p)))
+ }
+
+ /** The lower parameters of `param` that are not also below `butNot`. */
+ def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam] =
+ lower(param).filterNot(isLess(_, butNot))
+
+ /** The upper parameters of `param` that are not also above `butNot`. */
+ def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam] =
+ upper(param).filterNot(isLess(butNot, _))
+
+// ---------- Info related to PolyParams -------------------------------------------
+
+ /** Is `param1 <: param2` recorded in the ordering? */
+ def isLess(param1: PolyParam, param2: PolyParam): Boolean =
+ upper(param1).contains(param2)
+
+ /** The bounds of `param` not counting related parameters.
+ * Precondition: `param` is constrained (its entry is a TypeBounds).
+ */
+ def nonParamBounds(param: PolyParam): TypeBounds =
+ entry(param).asInstanceOf[TypeBounds]
+
+ /** The lower bound of `param` including all parameters below it (joined with `|`). */
+ def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type =
+ (nonParamBounds(param).lo /: minLower(param))(_ | _)
+
+ /** The upper bound of `param` including all parameters above it (joined with `&`). */
+ def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type =
+ (nonParamBounds(param).hi /: minUpper(param))(_ & _)
+
+ /** The bounds of `param` with parameter orderings folded back in. */
+ def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds =
+ nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param))
+
+ /** The type variable tracking `param`, or NoType if none was created. */
+ def typeVarOfParam(param: PolyParam): Type = {
+ val entries = boundsMap(param.binder)
+ if (entries == null) NoType
+ else {
+ val tvar = typeVar(entries, param.paramNum)
+ if (tvar != null) tvar else NoType
+ }
+ }
+
+// ---------- Adding PolyTypes --------------------------------------------------
+
+ /** The list of parameters P such that, for a fresh type parameter Q:
+ *
+ * Q <: tp implies Q <: P and isUpper = true, or
+ * tp <: Q implies P <: Q and isUpper = false
+ */
+ def dependentParams(tp: Type, isUpper: Boolean): List[PolyParam] = tp match {
+ case param: PolyParam if contains(param) =>
+ param :: (if (isUpper) upper(param) else lower(param))
+ case tp: AndOrType =>
+ val ps1 = dependentParams(tp.tp1, isUpper)
+ val ps2 = dependentParams(tp.tp2, isUpper)
+ // For an upper `&` (or lower `|`) both sides constrain, so take the union;
+ // otherwise only parameters common to both sides are implied.
+ if (isUpper == tp.isAnd) ps1.union(ps2) else ps1.intersect(ps2)
+ case _ =>
+ Nil
+ }
+
+ /** The bound type `tp` without constrained parameters which are clearly
+ * dependent. A parameter in an upper bound is clearly dependent if it appears
+ * in a hole of a context H given by:
+ *
+ * H = []
+ * H & T
+ * T & H
+ *
+ * (the idea is that a parameter P in a H context is guaranteed to be a supertype of the
+ * bounded parameter.)
+ * Analogously, a parameter in a lower bound is clearly dependent if it appears
+ * in a hole of a context H given by:
+ *
+ * L = []
+ * L | T
+ * T | L
+ *
+ * "Clearly dependent" is not synonymous with "dependent" in the sense
+ * it is defined in `dependentParams`. Dependent parameters are handled
+ * in `updateEntry`. The idea of stripping off clearly dependent parameters
+ * and to handle them separately is for efficiency, so that type expressions
+ * used as bounds become smaller.
+ *
+ * Stripped parameters are appended to `paramBuf` (without duplicates);
+ * NoType is returned when the whole type was stripped away.
+ *
+ * @param isUpper If true, `bound` is an upper bound, else a lower bound.
+ */
+ private def stripParams(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ isUpper: Boolean)(implicit ctx: Context): Type = tp match {
+ case param: PolyParam if contains(param) =>
+ if (!paramBuf.contains(param)) paramBuf += param
+ NoType
+ case tp: AndOrType if isUpper == tp.isAnd =>
+ val tp1 = stripParams(tp.tp1, paramBuf, isUpper)
+ val tp2 = stripParams(tp.tp2, paramBuf, isUpper)
+ if (tp1.exists)
+ if (tp2.exists) tp.derivedAndOrType(tp1, tp2)
+ else tp1
+ else tp2
+ case _ =>
+ tp
+ }
+
+ /** The bound type `tp` without clearly dependent parameters.
+ * A top or bottom type if type consists only of dependent parameters.
+ * @param isUpper If true, `bound` is an upper bound, else a lower bound.
+ */
+ private def normalizedType(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ isUpper: Boolean)(implicit ctx: Context): Type =
+ stripParams(tp, paramBuf, isUpper)
+ .orElse(if (isUpper) defn.AnyType else defn.NothingType)
+
+ /** Add all parameters of `poly` (tracked by `tvars`) to this constraint.
+ * Precondition: `poly` is not yet in the constraint's domain.
+ */
+ def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This = {
+ assert(!contains(poly))
+ val nparams = poly.paramNames.length
+ // First half of the array holds the bounds, second half the type variables.
+ val entries1 = new Array[Type](nparams * 2)
+ poly.paramBounds.copyToArray(entries1, 0)
+ tvars.copyToArray(entries1, nparams)
+ newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap).init(poly)
+ }
+
+ /** Split dependent parameters off the bounds for parameters in `poly`.
+ * Update all bounds to be normalized and update ordering to account for
+ * dependent parameters.
+ */
+ private def init(poly: PolyType)(implicit ctx: Context): This = {
+ var current = this
+ val loBuf, hiBuf = new mutable.ListBuffer[PolyParam]
+ var i = 0
+ while (i < poly.paramNames.length) {
+ val param = PolyParam(poly, i)
+ val bounds = nonParamBounds(param)
+ // Strip clearly dependent parameters out of each bound ...
+ val lo = normalizedType(bounds.lo, loBuf, isUpper = false)
+ val hi = normalizedType(bounds.hi, hiBuf, isUpper = true)
+ current = updateEntry(current, param, bounds.derivedTypeBounds(lo, hi))
+ // ... and record them in the parameter ordering instead.
+ current = (current /: loBuf)(order(_, _, param))
+ current = (current /: hiBuf)(order(_, param, _))
+ loBuf.clear()
+ hiBuf.clear()
+ i += 1
+ }
+ if (Config.checkConstraintsNonCyclic) checkNonCyclic()
+ current
+ }
+
+// ---------- Updates ------------------------------------------------------------
+
+ /** Add the fact `param1 <: param2` to the constraint `current` and propagate
+ * `<:<` relationships between parameters ("edges") but not bounds.
+ * NOTE(review): when the edge is already present this returns `this`,
+ * not `current` — confirm callers always pass `current eq this` in that case.
+ */
+ private def order(current: This, param1: PolyParam, param2: PolyParam)(implicit ctx: Context): This =
+ if (param1 == param2 || current.isLess(param1, param2)) this
+ else {
+ assert(contains(param1))
+ assert(contains(param2))
+ // Close the ordering transitively: everything below param1 is below
+ // everything above param2.
+ val newUpper = param2 :: exclusiveUpper(param2, param1)
+ val newLower = param1 :: exclusiveLower(param1, param2)
+ val current1 = (current /: newLower)(upperLens.map(this, _, _, newUpper ::: _))
+ val current2 = (current1 /: newUpper)(lowerLens.map(this, _, _, newLower ::: _))
+ current2
+ }
+
+ /** This constraint with the additional fact `param1 <: param2`. */
+ def addLess(param1: PolyParam, param2: PolyParam)(implicit ctx: Context): This =
+ order(this, param1, param2)
+
+ /** Set the entry of `param` in `current` to `tp`; if `tp` is a TypeBounds,
+ * also record orderings for any parameters its bounds depend on.
+ */
+ def updateEntry(current: This, param: PolyParam, tp: Type)(implicit ctx: Context): This = {
+ var current1 = boundsLens.update(this, current, param, tp)
+ tp match {
+ case TypeBounds(lo, hi) =>
+ for (p <- dependentParams(lo, isUpper = false))
+ current1 = order(current1, p, param)
+ for (p <- dependentParams(hi, isUpper = true))
+ current1 = order(current1, param, p)
+ case _ =>
+ }
+ current1
+ }
+
+ /** This constraint with the entry of `param` set to `tp`. */
+ def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This =
+ updateEntry(this, param, tp)
+
+ /** Identify `p2` with `p1`: intersect their bounds (substituting `p2` by `p1`)
+ * and replace all occurrences of `p2` by `p1`.
+ */
+ def unify(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This = {
+ val p1Bounds = (nonParamBounds(p1) & nonParamBounds(p2)).substParam(p2, p1)
+ updateEntry(p1, p1Bounds).replace(p2, p1)
+ }
+
+ /** Tighten one side of `param`'s bounds: `&` the upper bound with `bound`
+ * if `isUpper`, else `|` the lower bound with `bound`.
+ */
+ def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This = {
+ val oldBounds @ TypeBounds(lo, hi) = nonParamBounds(param)
+ val newBounds =
+ if (isUpper) oldBounds.derivedTypeBounds(lo, hi & bound)
+ else oldBounds.derivedTypeBounds(lo | bound, hi)
+ updateEntry(param, newBounds)
+ }
+
+// ---------- Removals ------------------------------------------------------------
+
+ /** A new constraint which is derived from this constraint by removing
+ * the type parameter `param` from the domain and replacing all top-level occurrences
+ * of the parameter elsewhere in the constraint by type `tp`, or a conservative
+ * approximation of it if that is needed to avoid cycles.
+ * Occurrences nested inside a refinement or prefix are not affected.
+ *
+ * The reason we need to substitute top-level occurrences of the parameter
+ * is to deal with situations like the following. Say we have in the constraint
+ *
+ * P <: Q & String
+ * Q
+ *
+ * and we replace Q with P. Then substitution gives
+ *
+ * P <: P & String
+ *
+ * this would be a cyclic constraint and is therefore changed by `normalize` and
+ * `recombine` below to
+ *
+ * P <: String
+ *
+ * approximating the RHS occurrence of P with Any. Without the substitution we
+ * would not find out where we need to approximate. Occurrences of parameters
+ * that are not top-level are not affected.
+ */
+ def replace(param: PolyParam, tp: Type)(implicit ctx: Context): OrderingConstraint = {
+ val replacement = tp.dealias.stripTypeVar
+ if (param == replacement) this
+ else {
+ assert(replacement.isValueTypeOrLambda)
+ val poly = param.binder
+ val idx = param.paramNum
+
+ // Drop `param` itself from an ordering list.
+ def removeParam(ps: List[PolyParam]) =
+ ps.filterNot(p => p.binder.eq(poly) && p.paramNum == idx)
+
+ // Substitute `replacement` for `param` in the entry of parameter (atPoly, atIdx),
+ // approximating top-level self-references to avoid cycles (see doc comment above).
+ def replaceParam(tp: Type, atPoly: PolyType, atIdx: Int): Type = tp match {
+ case bounds @ TypeBounds(lo, hi) =>
+
+ // Rebuild an `&`/`|` after mapping both sides with `op`.
+ def recombine(andor: AndOrType, op: (Type, Boolean) => Type, isUpper: Boolean): Type = {
+ val tp1 = op(andor.tp1, isUpper)
+ val tp2 = op(andor.tp2, isUpper)
+ if ((tp1 eq andor.tp1) && (tp2 eq andor.tp2)) andor
+ else if (andor.isAnd) tp1 & tp2
+ else tp1 | tp2
+ }
+
+ // Approximate top-level occurrences of the bounded parameter itself
+ // by Any (upper) or Nothing (lower).
+ def normalize(tp: Type, isUpper: Boolean): Type = tp match {
+ case p: PolyParam if p.binder == atPoly && p.paramNum == atIdx =>
+ if (isUpper) defn.AnyType else defn.NothingType
+ case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, normalize, isUpper)
+ case _ => tp
+ }
+
+ // Replace top-level occurrences of `param`, normalizing the replacement.
+ def replaceIn(tp: Type, isUpper: Boolean): Type = tp match {
+ case `param` => normalize(replacement, isUpper)
+ case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper)
+ case _ => tp.substParam(param, replacement)
+ }
+
+ bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true))
+ case _ =>
+ tp.substParam(param, replacement)
+ }
+
+ // Remove the whole polytype if every parameter is instantiated,
+ // otherwise just record the instance for `param`.
+ var current =
+ if (isRemovable(poly)) remove(poly) else updateEntry(param, replacement)
+ current.foreachParam {(p, i) =>
+ current = boundsLens.map(this, current, p, i, replaceParam(_, p, i))
+ current = lowerLens.map(this, current, p, i, removeParam)
+ current = upperLens.map(this, current, p, i, removeParam)
+ }
+ current
+ }
+ }
+
+ /** This constraint with polytype `pt` removed from the domain and all
+ * references to its parameters dropped from the orderings.
+ */
+ def remove(pt: PolyType)(implicit ctx: Context): This = {
+ def removeFromOrdering(po: ParamOrdering) = {
+ def removeFromBoundss(key: PolyType, bndss: Array[List[PolyParam]]): Array[List[PolyParam]] = {
+ val bndss1 = bndss.map(_.filterConserve(_.binder ne pt))
+ if (bndss.corresponds(bndss1)(_ eq _)) bndss else bndss1
+ }
+ po.remove(pt).mapValuesNow(removeFromBoundss)
+ }
+ newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap))
+ }
+
+ /** Can `pt` be removed, i.e. is every one of its parameters tracked by an
+ * instantiated type variable?
+ */
+ def isRemovable(pt: PolyType): Boolean = {
+ val entries = boundsMap(pt)
+ @tailrec def allRemovable(last: Int): Boolean =
+ if (last < 0) true
+ else typeVar(entries, last) match {
+ case tv: TypeVar => tv.inst.exists && allRemovable(last - 1)
+ case _ => false
+ }
+ allRemovable(paramCount(entries) - 1)
+ }
+
+// ---------- Exploration --------------------------------------------------------
+
+ /** All polytypes in the constraint's domain. */
+ def domainPolys: List[PolyType] = boundsMap.keys
+
+ /** All parameters in the domain that still have an existing entry. */
+ def domainParams: List[PolyParam] =
+ for {
+ (poly, entries) <- boundsMap.toList
+ n <- 0 until paramCount(entries)
+ if entries(n).exists
+ } yield PolyParam(poly, n)
+
+ /** Does `p` hold for every still-constrained (bounds-entry) parameter? */
+ def forallParams(p: PolyParam => Boolean): Boolean = {
+ boundsMap.foreachBinding { (poly, entries) =>
+ for (i <- 0 until paramCount(entries))
+ if (isBounds(entries(i)) && !p(PolyParam(poly, i))) return false
+ }
+ true
+ }
+
+ /** Apply `p` to every (polytype, param-index) pair in the domain.
+ * NOTE(review): unlike `forallParams`, this visits all declared parameters
+ * of each polytype, including instantiated ones — confirm that is intended.
+ */
+ def foreachParam(p: (PolyType, Int) => Unit): Unit =
+ boundsMap.foreachBinding { (poly, entries) =>
+ 0.until(poly.paramNames.length).foreach(p(poly, _))
+ }
+
+ /** Apply `op` to every uninstantiated type variable tracked by the constraint. */
+ def foreachTypeVar(op: TypeVar => Unit): Unit =
+ boundsMap.foreachBinding { (poly, entries) =>
+ for (i <- 0 until paramCount(entries)) {
+ typeVar(entries, i) match {
+ case tv: TypeVar if !tv.inst.exists => op(tv)
+ case _ =>
+ }
+ }
+ }
+
+ /** The conjunction of this constraint with `other`: entries present in both
+ * are merged pointwise (bounds intersected, orderings unioned); entries present
+ * in only one side are kept. Fails with an assertion error if two entries
+ * cannot be reconciled.
+ */
+ def & (other: Constraint)(implicit ctx: Context) = {
+ // Merge two ArrayValuedMaps, combining per-index entries with `join`.
+ def merge[T](m1: ArrayValuedMap[T], m2: ArrayValuedMap[T], join: (T, T) => T): ArrayValuedMap[T] = {
+ var merged = m1
+ def mergeArrays(xs1: Array[T], xs2: Array[T]) = {
+ val xs = xs1.clone
+ for (i <- xs.indices) xs(i) = join(xs1(i), xs2(i))
+ xs
+ }
+ m2.foreachBinding { (poly, xs2) =>
+ merged = merged.updated(poly,
+ if (m1.contains(poly)) mergeArrays(m1(poly), xs2) else xs2)
+ }
+ merged
+ }
+
+ // Union of two ordering lists, preserving ps1 and prepending new elements of ps2.
+ def mergeParams(ps1: List[PolyParam], ps2: List[PolyParam]) =
+ (ps1 /: ps2)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1)
+
+ // Combine two bounds/instance entries; incompatible combinations are an error.
+ def mergeEntries(e1: Type, e2: Type): Type = e1 match {
+ case e1: TypeBounds =>
+ e2 match {
+ case e2: TypeBounds => e1 & e2
+ case _ if e1 contains e2 => e2
+ case _ => mergeError
+ }
+ case tv1: TypeVar =>
+ e2 match {
+ case tv2: TypeVar if tv1.instanceOpt eq tv2.instanceOpt => e1
+ case _ => mergeError
+ }
+ case _ if e1 eq e2 => e1
+ case _ => mergeError
+ }
+
+ def mergeError = throw new AssertionError(i"cannot merge $this with $other")
+
+ val that = other.asInstanceOf[OrderingConstraint]
+ new OrderingConstraint(
+ merge(this.boundsMap, that.boundsMap, mergeEntries),
+ merge(this.lowerMap, that.lowerMap, mergeParams),
+ merge(this.upperMap, that.upperMap, mergeParams))
+ }
+
+ /** Assert that no type stored in the constraint refers to a parameter of a
+ * polytype outside the constraint's domain.
+ */
+ override def checkClosed()(implicit ctx: Context): Unit = {
+ def isFreePolyParam(tp: Type) = tp match {
+ case PolyParam(binder: PolyType, _) => !contains(binder)
+ case _ => false
+ }
+ def checkClosedType(tp: Type, where: String) =
+ if (tp != null)
+ assert(!tp.existsPart(isFreePolyParam), i"unclosed constraint: $this refers to $tp in $where")
+ boundsMap.foreachBinding((_, tps) => tps.foreach(checkClosedType(_, "bounds")))
+ lowerMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "lower"))))
+ upperMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "upper"))))
+ }
+
+ // Cache for `uninstVars`, computed lazily on first access.
+ private var myUninstVars: mutable.ArrayBuffer[TypeVar] = _
+
+ /** The uninstantiated typevars of this constraint */
+ def uninstVars: collection.Seq[TypeVar] = {
+ if (myUninstVars == null) {
+ myUninstVars = new mutable.ArrayBuffer[TypeVar]
+ boundsMap.foreachBinding { (poly, entries) =>
+ for (i <- 0 until paramCount(entries)) {
+ typeVar(entries, i) match {
+ case tv: TypeVar if !tv.inst.exists && isBounds(entries(i)) => myUninstVars += tv
+ case _ =>
+ }
+ }
+ }
+ }
+ myUninstVars
+ }
+
+// ---------- Cyclic checking -------------------------------------------
+
+ /** Assert that no parameter in the domain is ordered below itself. */
+ def checkNonCyclic()(implicit ctx: Context): Unit =
+ domainParams.foreach(checkNonCyclic)
+
+ private def checkNonCyclic(param: PolyParam)(implicit ctx: Context): Unit =
+ assert(!isLess(param, param), i"cyclic constraint involving $param in $this")
+
+// ---------- toText -----------------------------------------------------
+
+ /** Render the constraint for diagnostics: uninstantiated vars, constrained
+ * polytypes, per-parameter bounds, and the minimal parameter ordering.
+ */
+ override def toText(printer: Printer): Text = {
+ // Bounds entries print as-is; instance entries print as ` := tp`.
+ def entryText(tp: Type) = tp match {
+ case tp: TypeBounds =>
+ tp.toText(printer)
+ case _ =>
+ " := " ~ tp.toText(printer)
+ }
+ val indent = 3
+ val header: Text = "Constraint("
+ val uninstVarsText = " uninstVars = " ~
+ Text(uninstVars map (_.toText(printer)), ", ") ~ ";"
+ val constrainedText =
+ " constrained types = " ~ Text(domainPolys map (_.toText(printer)), ", ")
+ val boundsText =
+ " bounds = " ~ {
+ val assocs =
+ for (param <- domainParams)
+ yield (" " * indent) ~ param.toText(printer) ~ entryText(entry(param))
+ Text(assocs, "\n")
+ }
+ val orderingText =
+ " ordering = " ~ {
+ val deps =
+ for {
+ param <- domainParams
+ ups = minUpper(param)
+ if ups.nonEmpty
+ }
+ yield
+ (" " * indent) ~ param.toText(printer) ~ " <: " ~
+ Text(ups.map(_.toText(printer)), ", ")
+ Text(deps, "\n")
+ }
+ Text.lines(List(header, uninstVarsText, constrainedText, boundsText, orderingText, ")"))
+ }
+
+ /** Plain-string rendering (no printer): constrained types and bounds only. */
+ override def toString: String = {
+ def entryText(tp: Type): String = tp match {
+ case tp: TypeBounds => tp.toString
+ case _ =>" := " + tp
+ }
+ val constrainedText =
+ " constrained types = " + domainPolys.mkString("\n")
+ val boundsText =
+ " bounds = " + {
+ val assocs =
+ for (param <- domainParams)
+ yield
+ param.binder.paramNames(param.paramNum) + ": " + entryText(entry(param))
+ assocs.mkString("\n")
+ }
+ constrainedText + "\n" + boundsText
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala
new file mode 100644
index 000000000..6efadab7f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Periods.scala
@@ -0,0 +1,159 @@
+package dotty.tools.dotc.core
+
+import Contexts._
+import dotty.tools.dotc.util.DotClass
+
+/** Periods are the central "clock" of the compiler.
+ * A period consists of a run id and a phase id.
+ * run ids represent compiler runs
+ * phase ids represent compiler phases
+ */
+abstract class Periods extends DotClass { self: Context =>
+ import Periods._
+
+ /** The current phase identifier */
+ def phaseId: Int = period.phaseId
+
+ /** The current run identifier */
+ def runId: Int = period.runId
+
+ /** Execute `op` at given period */
+ def atPeriod[T](pd: Period)(op: Context => T): T =
+ op(ctx.fresh.setPeriod(pd))
+
+ /** Execute `op` at given phase id */
+ def atPhase[T](pid: PhaseId)(op: Context => T): T =
+ op(ctx.withPhase(pid))
+
+ /** The period containing the current period where denotations do not change.
+ * We compute this by taking as first phase the first phase less or equal to
+ * the current phase that has the same "nextTransformerId". As last phase
+ * we take the next transformer id following the current phase.
+ */
+ def stablePeriod = {
+ var first = phaseId
+ val nxTrans = ctx.base.nextDenotTransformerId(first)
+ // Walk backwards while the preceding phase maps to the same next transformer.
+ while (first - 1 > NoPhaseId && (ctx.base.nextDenotTransformerId(first - 1) == nxTrans)) {
+ first -= 1
+ }
+ Period(runId, first, nxTrans)
+ }
+}
+
+object Periods {
+
+ /** A period is a contiguous sequence of phase ids in some run.
+ * It is coded as follows:
+ *
+ * sign, always 0 1 bit
+ * runid 19 bits
+ * last phase id: 6 bits
+ * #phases before last: 6 bits
+ *
+ * // Dmitry: sign == 0 isn't actually always true, in some cases phaseId == -1 is used for shifts, that easily creates code < 0
+ */
+ class Period(val code: Int) extends AnyVal {
+
+ /** The run identifier of this period. */
+ def runId: RunId = code >>> (PhaseWidth * 2)
+
+ /** The phase identifier of this single-phase period. */
+ def phaseId: PhaseId = (code >>> PhaseWidth) & PhaseMask
+
+ /** The last phase of this period */
+ def lastPhaseId: PhaseId =
+ (code >>> PhaseWidth) & PhaseMask
+
+ /** The first phase of this period */
+ def firstPhaseId = lastPhaseId - (code & PhaseMask)
+
+ /** Does the phase interval of this period include phase `id`? */
+ def containsPhaseId(id: PhaseId) = firstPhaseId <= id && id <= lastPhaseId
+
+ /** Does this period contain given period? */
+ def contains(that: Period): Boolean = {
+ // Let this = (r1, l1, d1), that = (r2, l2, d2)
+ // where r = runid, l = last phase, d = duration - 1
+ // Then seen as intervals:
+ //
+ // this = r1 / (l1 - d1) .. l1
+ // that = r2 / (l2 - d2) .. l2
+ //
+ // Let's compute:
+ //
+ // lastDiff = X * 2^5 + (l1 - l2) mod 2^5
+ // where X >= 0, X == 0 iff r1 == r2 & l1 - l2 >= 0
+ // result = lastDiff + d2 <= d1
+ // We have:
+ // lastDiff + d2 <= d1
+ // iff X == 0 && l1 - l2 >= 0 && l1 - l2 + d2 <= d1
+ // iff r1 == r2 & l1 >= l2 && l1 - d1 <= l2 - d2
+ // q.e.d
+ val lastDiff = (code - that.code) >>> PhaseWidth
+ lastDiff + (that.code & PhaseMask ) <= (this.code & PhaseMask)
+ }
+
+ /** Does this period overlap with given period? */
+ def overlaps(that: Period): Boolean =
+ this.runId == that.runId &&
+ this.firstPhaseId <= that.lastPhaseId &&
+ that.firstPhaseId <= this.lastPhaseId
+
+ /** The intersection of two periods */
+ def & (that: Period): Period =
+ if (this overlaps that)
+ Period(
+ this.runId,
+ this.firstPhaseId max that.firstPhaseId,
+ this.lastPhaseId min that.lastPhaseId)
+ else
+ Nowhere
+
+ /** The smallest period containing both `this` and `that`.
+ * NOTE(review): run ids are not compared here; presumably both periods
+ * are expected to belong to the same run — confirm with callers.
+ */
+ def | (that: Period): Period =
+ Period(this.runId,
+ this.firstPhaseId min that.firstPhaseId,
+ this.lastPhaseId max that.lastPhaseId)
+
+ override def toString = s"Period($firstPhaseId..$lastPhaseId, run = $runId)"
+ }
+
+ object Period {
+
+ /** The single-phase period consisting of given run id and phase id */
+ def apply(rid: RunId, pid: PhaseId): Period = {
+ new Period(((rid << PhaseWidth) | pid) << PhaseWidth)
+ }
+
+ /** The period consisting of given run id, and lo/hi phase ids */
+ def apply(rid: RunId, loPid: PhaseId, hiPid: PhaseId): Period = {
+ // Low 6 bits encode the duration (hiPid - loPid), per the layout above.
+ new Period(((rid << PhaseWidth) | hiPid) << PhaseWidth | (hiPid - loPid))
+ }
+
+ /** The interval consisting of all periods of given run id */
+ def allInRun(rid: RunId) = {
+ apply(rid, 0, PhaseMask)
+ }
+ }
+
+ /** The empty period (code 0): run NoRunId, no phases. */
+ final val Nowhere = new Period(0)
+
+ final val InitialPeriod = Period(InitialRunId, FirstPhaseId)
+
+ final val InvalidPeriod = Period(NoRunId, NoPhaseId)
+
+ /** An ordinal number for compiler runs. First run has number 1. */
+ type RunId = Int
+ final val NoRunId = 0
+ final val InitialRunId = 1
+ final val RunWidth = java.lang.Integer.SIZE - PhaseWidth * 2 - 1/* sign */
+ final val MaxPossibleRunId = (1 << RunWidth) - 1
+
+ /** An ordinal number for phases. First phase has number 1. */
+ type PhaseId = Int
+ final val NoPhaseId = 0
+ final val FirstPhaseId = 1
+
+ /** The number of bits needed to encode a phase identifier. */
+ final val PhaseWidth = 6
+ final val PhaseMask = (1 << PhaseWidth) - 1
+ final val MaxPossiblePhaseId = PhaseMask
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala
new file mode 100644
index 000000000..222e2235d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Phases.scala
@@ -0,0 +1,377 @@
+package dotty.tools.dotc
+package core
+
+import Periods._
+import Contexts._
+import dotty.tools.backend.jvm.{LabelDefs, GenBCode}
+import dotty.tools.dotc.core.Symbols.ClassSymbol
+import util.DotClass
+import DenotTransformers._
+import Denotations._
+import Decorators._
+import config.Printers.config
+import scala.collection.mutable.{ListBuffer, ArrayBuffer}
+import dotty.tools.dotc.transform.TreeTransforms.{TreeTransformer, MiniPhase, TreeTransform}
+import dotty.tools.dotc.transform._
+import Periods._
+import typer.{FrontEnd, RefChecks}
+import ast.tpd
+
+/** Phase-related operations mixed into `Context`. */
+trait Phases {
+  self: Context =>
+
+  import Phases._
+
+  /** The phase this context operates in (the first phase of its period). */
+  def phase: Phase = base.phases(period.firstPhaseId)
+
+  /** The phases of this context and its outer contexts, innermost first,
+   *  with runs of equal phases collapsed to one entry.
+   */
+  def phasesStack: List[Phase] =
+    if ((this eq NoContext) || !phase.exists) Nil
+    else phase :: outersIterator.dropWhile(_.phase == phase).next.phasesStack
+
+  /** Execute `op` at given phase */
+  def atPhase[T](phase: Phase)(op: Context => T): T =
+    atPhase(phase.id)(op)
+
+  /** Execute `op` at the phase following the current one. */
+  def atNextPhase[T](op: Context => T): T = atPhase(phase.next)(op)
+
+  /** Execute `op` at the current phase if it is not later than `limit`,
+   *  otherwise at `limit`. A non-existing `limit` imposes no bound.
+   */
+  def atPhaseNotLaterThan[T](limit: Phase)(op: Context => T): T =
+    if (!limit.exists || phase <= limit) op(this) else atPhase(limit)(op)
+
+  def atPhaseNotLaterThanTyper[T](op: Context => T): T =
+    atPhaseNotLaterThan(base.typerPhase)(op)
+
+  /** Is the current phase after the typer phase? */
+  def isAfterTyper: Boolean = base.isAfterTyper(phase)
+}
+
+object Phases {
+
+ trait PhasesBase {
+ this: ContextBase =>
+
+    /** All installed phases (squashed if squashing was used), dropping the
+     *  leading NoPhase sentinel at index 0.
+     */
+    def allPhases = (if (squashedPhases.nonEmpty) squashedPhases else phases).tail
+
+    /** Sentinel for "no phase": reports `exists = false` and cannot be run. */
+    object NoPhase extends Phase {
+      override def exists = false
+      def phaseName = "<no phase>"
+      def run(implicit ctx: Context): Unit = unsupported("run")
+      def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = unsupported("transform")
+    }
+
+    /** Placeholder phase used where some (unspecified) existing phase is required. */
+    object SomePhase extends Phase {
+      def phaseName = "<some phase>"
+      def run(implicit ctx: Context): Unit = unsupported("run")
+    }
+
+    /** A sentinel transformer object, installed after the last real phase. */
+    class TerminalPhase extends DenotTransformer {
+      def phaseName = "terminal"
+      def run(implicit ctx: Context): Unit = unsupported("run")
+      def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation =
+        unsupported("transform")
+      override def lastPhaseId(implicit ctx: Context) = id
+    }
+
+    def phasePlan = this.phasesPlan
+    def setPhasePlan(phasess: List[List[Phase]]) = this.phasesPlan = phasess
+
+    /** Squash TreeTransforms belonging to the same sublist into a single TreeTransformer.
+     *  Each TreeTransform gets its own period, whereas a combined TreeTransformer
+     *  gets a period equal to the union of the periods of its TreeTransforms.
+     *
+     *  Phases named in `phasesToSkip` are dropped; the plan is truncated at the
+     *  first phase named in `stopBeforePhases` (that phase excluded) or in
+     *  `stopAfterPhases` (that phase included). A TreeChecker is inserted after
+     *  every phase named in `YCheckAfter`, or after all phases if it contains "all".
+     */
+    def squashPhases(phasess: List[List[Phase]],
+        phasesToSkip: List[String], stopBeforePhases: List[String], stopAfterPhases: List[String], YCheckAfter: List[String]): List[Phase] = {
+      val squashedPhases = ListBuffer[Phase]()
+      var prevPhases: Set[Class[_ <: Phase]] = Set.empty
+      val YCheckAll = YCheckAfter.contains("all")
+
+      var stop = false
+      val filteredPhases = phasess.map(_.filter { p =>
+        // `pstop` snapshots `stop` before this phase updates it, so a phase in
+        // `stopAfterPhases` is still kept (inclusive) while later ones are dropped.
+        val pstop = stop
+        stop = stop | stopBeforePhases.contains(p.phaseName) | stopAfterPhases.contains(p.phaseName)
+        !(pstop || stopBeforePhases.contains(p.phaseName) || phasesToSkip.contains(p.phaseName))
+      })
+
+      var i = 0
+
+      while (i < filteredPhases.length) {
+        if (filteredPhases(i).nonEmpty) { //could be empty due to filtering
+          val filteredPhaseBlock = filteredPhases(i)
+          val phaseToAdd =
+            if (filteredPhaseBlock.length > 1) {
+              // Multi-phase block: every member must be a MiniPhase whose
+              // runsAfterGroupsOf requirements were satisfied by earlier blocks.
+              val phasesInBlock: Set[String] = filteredPhaseBlock.map(_.phaseName).toSet
+              for (phase <- filteredPhaseBlock) {
+                phase match {
+                  case p: MiniPhase =>
+                    val unmetRequirements = p.runsAfterGroupsOf &~ prevPhases
+                    assert(unmetRequirements.isEmpty,
+                      s"${phase.phaseName} requires ${unmetRequirements.mkString(", ")} to be in different TreeTransformer")
+
+                  case _ =>
+                    assert(false, s"Only tree transforms can be squashed, ${phase.phaseName} can not be squashed")
+                }
+              }
+              val block = new TreeTransformer {
+                override def phaseName: String = miniPhases.map(_.phaseName).mkString("TreeTransform:{", ", ", "}")
+                override def miniPhases: Array[MiniPhase] = filteredPhaseBlock.asInstanceOf[List[MiniPhase]].toArray
+              }
+              prevPhases ++= filteredPhaseBlock.map(_.getClazz)
+              block
+            } else { // block of a single phase, no squashing
+              val phase = filteredPhaseBlock.head
+              prevPhases += phase.getClazz
+              phase
+            }
+          squashedPhases += phaseToAdd
+          val shouldAddYCheck = YCheckAfter.containsPhase(phaseToAdd) || YCheckAll
+          if (shouldAddYCheck) {
+            val checker = new TreeChecker
+            squashedPhases += checker
+          }
+        }
+
+        i += 1
+      }
+      squashedPhases.toList
+    }
+
+    /** Use the following phases in the order they are given.
+     *  The list should never contain NoPhase.
+     *  If squashing is enabled, phases in the same subgroup will be squashed to a single phase.
+     *
+     *  Installs the `phases` array (NoPhase at index 0, TerminalPhase at the end),
+     *  assigns each phase its period via `init`, checks `runsAfter` ordering
+     *  requirements, and fills the `denotTransformers` / `nextDenotTransformerId`
+     *  lookup tables.
+     */
+    def usePhases(phasess: List[Phase], squash: Boolean = true) = {
+
+      val flatPhases = collection.mutable.ListBuffer[Phase]()
+
+      // Expand squashed TreeTransformers into their constituent mini-phases.
+      phasess.foreach(p => p match {
+        case t: TreeTransformer => flatPhases ++= t.miniPhases
+        case _ => flatPhases += p
+      })
+
+      phases = (NoPhase :: flatPhases.toList ::: new TerminalPhase :: Nil).toArray
+      var phasesAfter:Set[Class[_ <: Phase]] = Set.empty
+      nextDenotTransformerId = new Array[Int](phases.length)
+      denotTransformers = new Array[DenotTransformer](phases.length)
+
+      var phaseId = 0
+      def nextPhaseId = {
+        phaseId += 1
+        phaseId // starting from 1 as NoPhase is 0
+      }
+
+      // Assert that every phase listed in `p.runsAfter` was installed earlier.
+      def checkRequirements(p: Phase) = {
+        val unmetPrecedeRequirements = p.runsAfter -- phasesAfter
+        assert(unmetPrecedeRequirements.isEmpty,
+          s"phase ${p} has unmet requirement: ${unmetPrecedeRequirements.mkString(", ")} should precede this phase")
+        phasesAfter += p.getClazz
+
+      }
+      var i = 0
+
+      while (i < phasess.length) {
+        val phase = phasess(i)
+        phase match {
+          case t: TreeTransformer =>
+            // Each mini-phase gets its own id; the transformer's period spans them all.
+            val miniPhases = t.miniPhases
+            miniPhases.foreach{ phase =>
+              checkRequirements(phase)
+              phase.init(this, nextPhaseId)}
+            t.init(this, miniPhases.head.id, miniPhases.last.id)
+          case _ =>
+            phase.init(this, nextPhaseId)
+            checkRequirements(phase)
+        }
+
+        i += 1
+      }
+
+      phases.last.init(this, nextPhaseId) // init terminal phase
+
+      // Walk backwards so nextDenotTransformerId(i) is the index of the first
+      // DenotTransformer at position >= i.
+      i = phases.length
+      var lastTransformerId = i
+      while (i > 0) {
+        i -= 1
+        val phase = phases(i)
+        phase match {
+          case transformer: DenotTransformer =>
+            lastTransformerId = i
+            denotTransformers(i) = transformer
+          case _ =>
+        }
+        nextDenotTransformerId(i) = lastTransformerId
+      }
+
+      if (squash) {
+        this.squashedPhases = (NoPhase :: phasess).toArray
+      } else {
+        this.squashedPhases = this.phases
+      }
+
+      config.println(s"Phases = ${phases.deep}")
+      config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.deep}")
+    }
+
+    /** The installed phase that is an instance of `pclass`, or NoPhase if none. */
+    def phaseOfClass(pclass: Class[_]) = phases.find(pclass.isInstance).getOrElse(NoPhase)
+
+    // All PhaseCache instances ever created, so their memo can be invalidated together.
+    private val cachedPhases = collection.mutable.Set[PhaseCache]()
+    private def cleanPhaseCache = cachedPhases.foreach(_.myPhase = NoPhase)
+
+    /** A cache to compute the phase with given name, which
+     *  stores the phase as soon as phaseNamed returns something
+     *  different from NoPhase.
+     */
+    private class PhaseCache(pclass: Class[_ <: Phase]) {
+      var myPhase: Phase = NoPhase
+      def phase = {
+        // Lazily resolve and memoize; reset to NoPhase by cleanPhaseCache.
+        if (myPhase eq NoPhase) myPhase = phaseOfClass(pclass)
+        myPhase
+      }
+      cachedPhases += this
+    }
+
+    private val typerCache = new PhaseCache(classOf[FrontEnd])
+    private val picklerCache = new PhaseCache(classOf[Pickler])
+
+    private val refChecksCache = new PhaseCache(classOf[RefChecks])
+    private val elimRepeatedCache = new PhaseCache(classOf[ElimRepeated])
+    private val extensionMethodsCache = new PhaseCache(classOf[ExtensionMethods])
+    private val erasureCache = new PhaseCache(classOf[Erasure])
+    private val elimErasedValueTypeCache = new PhaseCache(classOf[ElimErasedValueType])
+    private val patmatCache = new PhaseCache(classOf[PatternMatcher])
+    private val lambdaLiftCache = new PhaseCache(classOf[LambdaLift])
+    private val flattenCache = new PhaseCache(classOf[Flatten])
+    private val explicitOuterCache = new PhaseCache(classOf[ExplicitOuter])
+    private val gettersCache = new PhaseCache(classOf[Getters])
+    private val genBCodeCache = new PhaseCache(classOf[GenBCode])
+
+    // Cached accessors for frequently consulted phases.
+    def typerPhase = typerCache.phase
+    def picklerPhase = picklerCache.phase
+    def refchecksPhase = refChecksCache.phase
+    def elimRepeatedPhase = elimRepeatedCache.phase
+    def extensionMethodsPhase = extensionMethodsCache.phase
+    def erasurePhase = erasureCache.phase
+    def elimErasedValueTypePhase = elimErasedValueTypeCache.phase
+    def patmatPhase = patmatCache.phase
+    def lambdaLiftPhase = lambdaLiftCache.phase
+    def flattenPhase = flattenCache.phase
+    def explicitOuterPhase = explicitOuterCache.phase
+    def gettersPhase = gettersCache.phase
+    def genBCodePhase = genBCodeCache.phase
+
+    def isAfterTyper(phase: Phase): Boolean = phase.id > typerPhase.id
+ }
+
+  trait Phase extends DotClass {
+
+    /** The name of this phase, used to identify it in phase-list options and diagnostics. */
+    def phaseName: String
+
+    /** List of names of phases that should precede this phase */
+    def runsAfter: Set[Class[_ <: Phase]] = Set.empty
+
+    /** Run this phase on the compilation unit of the given context. */
+    def run(implicit ctx: Context): Unit
+
+    /** Run this phase on all `units`, each in a fresh context set to this
+     *  phase's start, and collect the (possibly updated) compilation units.
+     */
+    def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] =
+      units.map { unit =>
+        val unitCtx = ctx.fresh.setPhase(this.start).setCompilationUnit(unit)
+        run(unitCtx)
+        unitCtx.compilationUnit
+      }
+
+    def description: String = phaseName
+
+    /** Output should be checkable by TreeChecker */
+    def isCheckable: Boolean = true
+
+    /** Check what the phase achieves, to be called at any point after it is finished.
+     */
+    def checkPostCondition(tree: tpd.Tree)(implicit ctx: Context): Unit = ()
+
+    /** If set, allow missing or superfluous arguments in applications
+     *  and type applications.
+     */
+    def relaxedTyping: Boolean = false
+
+    /** Is this phase the standard typerphase? True for FrontEnd, but
+     *  not for other first phases (such as FromTasty). The predicate
+     *  is tested in some places that perform checks and corrections. It's
+     *  different from isAfterTyper (and cheaper to test).
+     */
+    def isTyper = false
+
+    def exists: Boolean = true
+
+    // Mutable slots filled by `init` when the phase is installed in a ContextBase.
+    private var myPeriod: Period = Periods.InvalidPeriod
+    private var myBase: ContextBase = null
+    private var myErasedTypes = false
+    private var myFlatClasses = false
+    private var myRefChecked = false
+    private var mySymbolicRefs = false
+    private var myLabelsReordered = false
+
+
+    /** The sequence position of this phase in the given context where 0
+     *  is reserved for NoPhase and the first real phase is at position 1.
+     *  -1 if the phase is not installed in the context.
+     */
+    def id = myPeriod.firstPhaseId
+
+    /** The period assigned to this phase by `init`; start/end are its phase-id bounds. */
+    def period = myPeriod
+    def start = myPeriod.firstPhaseId
+    def end = myPeriod.lastPhaseId
+
+    final def erasedTypes = myErasedTypes // Phase is after erasure
+    final def flatClasses = myFlatClasses // Phase is after flatten
+    final def refChecked = myRefChecked // Phase is after RefChecks
+    final def symbolicRefs = mySymbolicRefs // Phase is after ResolveSuper, newly generated TermRefs should be symbolic
+    final def labelsReordered = myLabelsReordered // Phase is after LabelDefs, labels are flattened and owner chains don't mirror this
+
+    /** Install this phase in `base` with period [start, end]. The after-X flags
+     *  are inherited from the previous phase and switched on once the relevant
+     *  phase class has been passed. A phase object may only be installed once.
+     */
+    protected[Phases] def init(base: ContextBase, start: Int, end:Int): Unit = {
+      if (start >= FirstPhaseId)
+        assert(myPeriod == Periods.InvalidPeriod, s"phase $this has already been used once; cannot be reused")
+      myBase = base
+      myPeriod = Period(NoRunId, start, end)
+      myErasedTypes = prev.getClass == classOf[Erasure] || prev.erasedTypes
+      myFlatClasses = prev.getClass == classOf[Flatten] || prev.flatClasses
+      myRefChecked = prev.getClass == classOf[RefChecks] || prev.refChecked
+      mySymbolicRefs = prev.getClass == classOf[ResolveSuper] || prev.symbolicRefs
+      myLabelsReordered = prev.getClass == classOf[LabelDefs] || prev.labelsReordered
+    }
+
+    protected[Phases] def init(base: ContextBase, id: Int): Unit = init(base, id, id)
+
+    final def <=(that: Phase) =
+      exists && id <= that.id
+
+    /** The phase installed just before this one, or NoPhase at the beginning. */
+    final def prev: Phase =
+      if (id > FirstPhaseId) myBase.phases(start - 1) else myBase.NoPhase
+
+    /** The phase installed just after this one, or NoPhase at the end. */
+    final def next: Phase =
+      if (hasNext) myBase.phases(end + 1) else myBase.NoPhase
+
+    final def hasNext = start >= FirstPhaseId && end + 1 < myBase.phases.length
+
+    // Iterates from this phase forward while a successor exists; the final
+    // installed phase (for which hasNext is false) is not produced.
+    final def iterator =
+      Iterator.iterate(this)(_.next) takeWhile (_.hasNext)
+
+    override def toString = phaseName
+  }
+
+  /** Implemented by phases that need companion objects to exist for certain classes. */
+  trait NeedsCompanions {
+    def isCompanionNeeded(cls: ClassSymbol)(implicit ctx: Context): Boolean
+  }
+
+  /** Replace all instances of `oldPhaseClass` in `current` phases
+   *  by the result of `newPhases` applied to the old phase.
+   */
+  def replace(oldPhaseClass: Class[_ <: Phase], newPhases: Phase => List[Phase], current: List[List[Phase]]): List[List[Phase]] =
+    current.map(_.flatMap(phase =>
+      if (oldPhaseClass.isInstance(phase)) newPhases(phase) else phase :: Nil))
+
+  /** Dotty deviation: getClass yields Class[_], instead of [Class <: <type of receiver>].
+   *  We can get back the old behavior using this decorator. We should also use the same
+   *  trick for standard getClass.
+   */
+  private implicit class getClassDeco[T](val x: T) extends AnyVal {
+    def getClazz: Class[_ <: T] = x.getClass.asInstanceOf[Class[_ <: T]]
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala
new file mode 100644
index 000000000..3daa8117e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala
@@ -0,0 +1,437 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools.dotc
+package core
+
+import Symbols._
+import Types.{TermRef, NoPrefix}
+import Flags.Implicit
+import Names._
+import Periods._
+import Decorators._
+import Contexts._
+import Denotations._
+import SymDenotations._
+import printing.Texts._
+import printing.Printer
+import util.common._
+import util.DotClass
+import SymDenotations.NoDenotation
+import collection.mutable
+
+object Scopes {
+
+  /** Maximal fill factor of hash table */
+  private final val FillFactor = 2.0/3.0
+
+  /** A hashtable is created once current size exceeds MinHash * FillFactor
+   *  The initial hash table has twice that size (i.e. 16).
+   *  This value must be a power of two, so that the index of an element can
+   *  be computed as element.hashCode & (hashTable.length - 1)
+   */
+  private final val MinHash = 8
+
+  /** The maximal permissible number of recursions when creating
+   *  a hashtable; beyond this, entries are rehashed iteratively instead.
+   */
+  private final val MaxRecursions = 1000
+
+  /** One entry of a scope, linking a name to a symbol. Entries are chained
+   *  two ways: through their hash bucket (`tail`) and through the scope's
+   *  insertion-ordered list (`prev`).
+   */
+  class ScopeEntry private[Scopes] (val name: Name, _sym: Symbol, val owner: Scope) {
+
+    // Mutable so that `MutableScope.replace` can swap the symbol in place.
+    var sym: Symbol = _sym
+
+    /** the next entry in the hash bucket
+     */
+    var tail: ScopeEntry = null
+
+    /** the preceding entry in this scope
+     */
+    var prev: ScopeEntry = null
+
+    override def toString: String = sym.toString
+  }
+
+  /** A scope contains a set of symbols. It can be an extension
+   *  of some outer scope, from which it inherits all symbols.
+   *  This class does not have any methods to add symbols to a scope
+   *  or to delete them. These methods are provided by subclass
+   *  MutableScope.
+   */
+  abstract class Scope extends DotClass with printing.Showable with Iterable[Symbol] {
+
+    /** The last scope-entry from which all others are reachable via `prev` */
+    private[dotc] def lastEntry: ScopeEntry
+
+    /** The number of symbols in this scope (including inherited ones
+     *  from outer scopes).
+     */
+    def size: Int
+
+    /** The number of outer scopes from which symbols are inherited */
+    def nestingLevel: Int
+
+    /** The symbols in this scope in the order they were entered;
+     *  inherited from outer ones first.
+     */
+    def toList: List[Symbol]
+
+    /** Return all symbols as an iterator in the order they were entered in this scope.
+     */
+    def iterator: Iterator[Symbol] = toList.iterator
+
+    /** Returns a new mutable scope with the same content as this one. */
+    def cloneScope(implicit ctx: Context): MutableScope
+
+    /** Is the scope empty? */
+    override def isEmpty: Boolean = lastEntry eq null
+
+    /** Lookup a symbol entry matching given name. */
+    def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry
+
+    /** Lookup next entry with same name as this one */
+    def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry
+
+    /** Lookup a symbol; returns NoSymbol if the name is absent. */
+    final def lookup(name: Name)(implicit ctx: Context): Symbol = {
+      val e = lookupEntry(name)
+      if (e eq null) NoSymbol else e.sym
+    }
+
+    /** Returns an iterator yielding every symbol with given name in this scope.
+     */
+    final def lookupAll(name: Name)(implicit ctx: Context): Iterator[Symbol] = new Iterator[Symbol] {
+      var e = lookupEntry(name)
+      def hasNext: Boolean = e ne null
+      def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
+    }
+
+    /** The denotation set of all the symbols with given name in this scope.
+     *  Symbols occur in the result in reverse order relative to their occurrence
+     *  in `this.toList`.
+     */
+    final def denotsNamed(name: Name, select: SymDenotation => Boolean = selectAll)(implicit ctx: Context): PreDenotation = {
+      var syms: PreDenotation = NoDenotation
+      var e = lookupEntry(name)
+      while (e != null) {
+        val d = e.sym.denot
+        if (select(d)) syms = syms union d
+        e = lookupNextEntry(e)
+      }
+      syms
+    }
+
+    /** The scope that keeps only those symbols from this scope that match the
+     *  given predicates. If all symbols match, returns the scope itself, otherwise
+     *  a copy with the matching symbols.
+     */
+    final def filteredScope(p: Symbol => Boolean)(implicit ctx: Context): Scope = {
+      var result: MutableScope = null
+      for (sym <- iterator)
+        if (!p(sym)) {
+          // Clone lazily: only pay for a copy once the first symbol is rejected.
+          if (result == null) result = cloneScope
+          result.unlink(sym)
+        }
+      if (result == null) this else result
+    }
+
+    /** TermRefs of all implicit symbols in this scope; empty by default, overridden by MutableScope. */
+    def implicitDecls(implicit ctx: Context): List[TermRef] = Nil
+
+    /** This scope as a MutableScope; unsupported unless overridden. */
+    def openForMutations: MutableScope = unsupported("openForMutations")
+
+    final def toText(printer: Printer): Text = printer.toText(this)
+
+    /** Hook for integrity checking; no-op by default. */
+    def checkConsistent()(implicit ctx: Context) = ()
+  }
+
+  /** A subclass of Scope that defines methods for entering and
+   *  unlinking entries.
+   *  Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead.
+   *  This is necessary because when run from reflection every scope needs to have a
+   *  SynchronizedScope as mixin.
+   */
+  class MutableScope protected[Scopes](initElems: ScopeEntry, initSize: Int, val nestingLevel: Int = 0)
+      extends Scope {
+
+    /** Create a scope nested in `base`, sharing its entries, one level deeper. */
+    protected[Scopes] def this(base: Scope)(implicit ctx: Context) = {
+      this(base.lastEntry, base.size, base.nestingLevel + 1)
+      ensureCapacity(MinHash)(ctx) // the implicit ctx is not in scope for a secondary constructor call, so it is passed explicitly
+    }
+
+    def this() = this(null, 0, 0)
+
+    private[dotc] var lastEntry: ScopeEntry = initElems
+
+    /** The size of the scope */
+    private[this] var _size = initSize
+
+    override final def size = _size
+    private def size_= (x: Int) = _size = x
+
+    /** the hash table; null until size first exceeds MinHash * FillFactor
+     */
+    private var hashTable: Array[ScopeEntry] = null
+
+    /** a cache for all elements, to be used by symbol iterator.
+     *  Invalidated (set to null) by every mutation.
+     */
+    private var elemsCache: List[Symbol] = null
+
+    /** Clone scope, taking care not to force the denotations of any symbols in the scope.
+     */
+    def cloneScope(implicit ctx: Context): MutableScope = {
+      // Collect this scope's own entries (stop at inherited ones) ...
+      val entries = new mutable.ArrayBuffer[ScopeEntry]
+      var e = lastEntry
+      while ((e ne null) && e.owner == this) {
+        entries += e
+        e = e.prev
+      }
+      // ... then re-enter them oldest-first to preserve declaration order.
+      val scope = newScope
+      for (i <- entries.length - 1 to 0 by -1) {
+        val e = entries(i)
+        scope.newScopeEntry(e.name, e.sym)
+      }
+      scope
+    }
+
+    /** create and enter a scope entry with given name and symbol */
+    protected def newScopeEntry(name: Name, sym: Symbol)(implicit ctx: Context): ScopeEntry = {
+      ensureCapacity(if (hashTable ne null) hashTable.length else MinHash)
+      val e = new ScopeEntry(name, sym, this)
+      e.prev = lastEntry
+      lastEntry = e
+      if (hashTable ne null) enterInHash(e)
+      size += 1
+      elemsCache = null
+      e
+    }
+
+    /** create and enter a scope entry */
+    protected def newScopeEntry(sym: Symbol)(implicit ctx: Context): ScopeEntry =
+      newScopeEntry(sym.name, sym)
+
+    // Prepend `e` to its hash bucket (buckets are LIFO).
+    private def enterInHash(e: ScopeEntry)(implicit ctx: Context): Unit = {
+      val idx = e.name.hashCode & (hashTable.length - 1)
+      e.tail = hashTable(idx)
+      assert(e.tail != e)
+      hashTable(idx) = e
+    }
+
+    /** enter a symbol in this scope. */
+    final def enter[T <: Symbol](sym: T)(implicit ctx: Context): T = {
+      // Up to and including typer, duplicate type names are treated as a bug.
+      if (sym.isType && ctx.phaseId <= ctx.typerPhase.id) {
+        assert(lookup(sym.name) == NoSymbol,
+          s"duplicate ${sym.debugString}; previous was ${lookup(sym.name).debugString}") // !!! DEBUG
+      }
+      newScopeEntry(sym)
+      sym
+    }
+
+    /** enter a symbol, asserting that no symbol with same name exists in scope */
+    final def enterUnique(sym: Symbol)(implicit ctx: Context): Unit = {
+      assert(lookup(sym.name) == NoSymbol, (sym.showLocated, lookup(sym.name).showLocated))
+      enter(sym)
+    }
+
+    // Create or grow the hash table once the fill factor would be exceeded.
+    private def ensureCapacity(tableSize: Int)(implicit ctx: Context): Unit =
+      if (size >= tableSize * FillFactor) createHash(tableSize * 2)
+
+    private def createHash(tableSize: Int)(implicit ctx: Context): Unit =
+      if (size > tableSize * FillFactor) createHash(tableSize * 2)
+      else {
+        hashTable = new Array[ScopeEntry](tableSize)
+        enterAllInHash(lastEntry)
+        // checkConsistent() // DEBUG
+      }
+
+    // Rehash all entries, recursing so that older entries are inserted first;
+    // falls back to an explicit list beyond MaxRecursions to avoid stack overflow.
+    private def enterAllInHash(e: ScopeEntry, n: Int = 0)(implicit ctx: Context): Unit = {
+      if (e ne null) {
+        if (n < MaxRecursions) {
+          enterAllInHash(e.prev, n + 1)
+          enterInHash(e)
+        } else {
+          var entries: List[ScopeEntry] = List()
+          var ee = e
+          while (ee ne null) {
+            entries = ee :: entries
+            ee = ee.prev
+          }
+          entries foreach enterInHash
+        }
+      }
+    }
+
+    /** Remove entry from this scope (which is required to be present) */
+    final def unlink(e: ScopeEntry)(implicit ctx: Context): Unit = {
+      // Remove from the insertion-ordered chain ...
+      if (lastEntry == e) {
+        lastEntry = e.prev
+      } else {
+        var e1 = lastEntry
+        while (e1.prev != e) e1 = e1.prev
+        e1.prev = e.prev
+      }
+      // ... and from its hash bucket, if a table exists.
+      if (hashTable ne null) {
+        val index = e.name.hashCode & (hashTable.length - 1)
+        var e1 = hashTable(index)
+        if (e1 == e)
+          hashTable(index) = e.tail
+        else {
+          while (e1.tail != e) e1 = e1.tail
+          e1.tail = e.tail
+        }
+      }
+      elemsCache = null
+      size -= 1
+    }
+
+    /** remove symbol from this scope if it is present */
+    final def unlink(sym: Symbol)(implicit ctx: Context): Unit = {
+      var e = lookupEntry(sym.name)
+      while (e ne null) {
+        if (e.sym == sym) unlink(e)
+        e = lookupNextEntry(e)
+      }
+    }
+
+    /** Replace symbol `prev` (if it exists in current scope) by symbol `replacement`.
+     *  @pre `prev` and `replacement` have the same name.
+     */
+    final def replace(prev: Symbol, replacement: Symbol)(implicit ctx: Context): Unit = {
+      require(prev.name == replacement.name)
+      var e = lookupEntry(prev.name)
+      while (e ne null) {
+        if (e.sym == prev) e.sym = replacement
+        e = lookupNextEntry(e)
+      }
+      elemsCache = null
+    }
+
+    /** Lookup a symbol entry matching given name.
+     *  Uses the hash table when present, otherwise a linear scan.
+     */
+    override final def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = {
+      var e: ScopeEntry = null
+      if (hashTable ne null) {
+        e = hashTable(name.hashCode & (hashTable.length - 1))
+        while ((e ne null) && e.name != name) {
+          e = e.tail
+        }
+      } else {
+        e = lastEntry
+        while ((e ne null) && e.name != name) {
+          e = e.prev
+        }
+      }
+      e
+    }
+
+    /** lookup next entry with same name as this one */
+    override final def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry = {
+      var e = entry
+      if (hashTable ne null)
+        do { e = e.tail } while ((e ne null) && e.name != entry.name)
+      else
+        do { e = e.prev } while ((e ne null) && e.name != entry.name)
+      e
+    }
+
+    /** Returns all symbols as a list in the order they were entered in this scope.
+     *  Does _not_ include the elements of inherited scopes.
+     */
+    override final def toList: List[Symbol] = {
+      if (elemsCache eq null) {
+        elemsCache = Nil
+        var e = lastEntry
+        while ((e ne null) && e.owner == this) {
+          elemsCache = e.sym :: elemsCache
+          e = e.prev
+        }
+      }
+      elemsCache
+    }
+
+    /** TermRefs for all symbols in this scope (inherited ones included) carrying the Implicit flag. */
+    override def implicitDecls(implicit ctx: Context): List[TermRef] = {
+      var irefs = new mutable.ListBuffer[TermRef]
+      var e = lastEntry
+      while (e ne null) {
+        if (e.sym is Implicit) {
+          val d = e.sym.denot
+          irefs += TermRef.withSigAndDenot(NoPrefix, d.name.asTermName, d.signature, d)
+        }
+        e = e.prev
+      }
+      irefs.toList
+    }
+
+    /** Vanilla scope - symbols are stored in declaration order.
+     */
+    final def sorted: List[Symbol] = toList
+
+    override def foreach[U](p: Symbol => U): Unit = toList foreach p
+
+    /** Symbols of this scope's own entries satisfying `p`, in declaration order. */
+    override def filter(p: Symbol => Boolean): List[Symbol] = {
+      var syms: List[Symbol] = Nil
+      var e = lastEntry
+      while ((e ne null) && e.owner == this) {
+        val sym = e.sym
+        if (p(sym)) syms = sym :: syms
+        e = e.prev
+      }
+      syms
+    }
+
+    override def openForMutations: MutableScope = this
+
+    /** Check that all symbols in this scope are in their correct hashtable buckets. */
+    override def checkConsistent()(implicit ctx: Context) = {
+      var e = lastEntry
+      while (e != null) {
+        var e1 = lookupEntry(e.name)
+        while (e1 != e && e1 != null) e1 = lookupNextEntry(e1)
+        assert(e1 == e, s"PANIC: Entry ${e.name} is badly linked")
+        e = e.prev
+      }
+    }
+  }
+
+  /** Create a new scope */
+  def newScope: MutableScope = new MutableScope()
+
+  /** Create a new scope nested in another one with which it shares its elements */
+  def newNestedScope(outer: Scope)(implicit ctx: Context): MutableScope = new MutableScope(outer)
+
+  /** Create a new scope with given initial elements */
+  def newScopeWith(elems: Symbol*)(implicit ctx: Context): MutableScope = {
+    val scope = newScope
+    elems foreach scope.enter
+    scope
+  }
+
+  /** Create new scope for the members of package `pkg` */
+  def newPackageScope(pkgClass: Symbol): MutableScope = newScope
+
+  /** Transform scope of members of `owner` using operation `op`
+   *  This is overridden by the reflective compiler to avoid creating new scopes for packages
+   */
+  def scopeTransform(owner: Symbol)(op: => MutableScope): MutableScope = op
+
+  // Standard selection predicates for `denotsNamed`.
+  val selectAll: SymDenotation => Boolean = alwaysTrue
+  val selectPrivate: SymDenotation => Boolean = d => (d.flagsUNSAFE is Flags.Private)
+  val selectNonPrivate: SymDenotation => Boolean = d => !(d.flagsUNSAFE is Flags.Private)
+
+  /** The empty scope (immutable).
+   */
+  object EmptyScope extends Scope {
+    override private[dotc] def lastEntry = null
+    override def size = 0
+    override def nestingLevel = 0
+    override def toList = Nil
+    override def cloneScope(implicit ctx: Context): MutableScope = unsupported("cloneScope")
+    override def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = null
+    override def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry = null
+  }
+
+  /** A class for error scopes (mutable)
+   */
+  class ErrorScope(owner: Symbol) extends MutableScope
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala
new file mode 100644
index 000000000..b2e627cbe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Signature.scala
@@ -0,0 +1,103 @@
+package dotty.tools.dotc
+package core
+
+import Names._, Types._, Contexts._, StdNames._
+import TypeErasure.sigName
+
+/** The signature of a denotation.
+ *  Overloaded denotations with the same name are distinguished by
+ *  their signatures. A signature of a method (of type PolyType, MethodType, or ExprType) is
+ *  composed of a list of signature names, one for each parameter type, plus a signature for
+ *  the result type. Methods are uncurried before taking their signatures.
+ *  The signature name of a type is the fully qualified name of the type symbol of the type's erasure.
+ *
+ *  For instance a definition
+ *
+ *      def f(x: Int)(y: List[String]): String
+ *
+ *  would have signature
+ *
+ *      Signature(
+ *        List("scala.Int".toTypeName, "scala.collection.immutable.List".toTypeName),
+ *        "scala.String".toTypeName)
+ *
+ *  The signatures of non-method types are always `NotAMethod`.
+ *
+ *  There are three kinds of "missing" parts of signatures:
+ *
+ *   - tpnme.EMPTY          Result type marker for NotAMethod and OverloadedSignature
+ *   - tpnme.WILDCARD       Arises from a Wildcard or error type
+ *   - tpnme.Uninstantiated Arises from an uninstantiated type variable
+ */
+case class Signature(paramsSig: List[TypeName], resSig: TypeName) {
+  import Signature._
+
+  /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */
+  private def consistent(name1: TypeName, name2: TypeName) =
+    name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated
+
+  /** Does this signature coincide with that signature on their parameter parts?
+   *  This is the case if all parameter names are _consistent_, i.e. they are either
+   *  equal or one of them is tpnme.Uninstantiated.
+   */
+  final def consistentParams(that: Signature): Boolean = {
+    def loop(names1: List[TypeName], names2: List[TypeName]): Boolean =
+      if (names1.isEmpty) names2.isEmpty
+      else names2.nonEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail)
+    loop(this.paramsSig, that.paramsSig)
+  }
+
+  /** The degree to which this signature matches `that`.
+   *  If parameter names are consistent and result type names match (i.e. they are the same
+   *  or one is a wildcard), the result is `FullMatch`.
+   *  If only the parameter names are consistent, the result is `ParamMatch` before erasure and
+   *  `NoMatch` otherwise.
+   *  If the parameters are inconsistent, the result is always `NoMatch`.
+   */
+  final def matchDegree(that: Signature)(implicit ctx: Context): MatchDegree =
+    if (consistentParams(that))
+      if (resSig == that.resSig || isWildcard(resSig) || isWildcard(that.resSig)) FullMatch
+      else if (!ctx.erasedTypes) ParamMatch
+      else NoMatch
+    else NoMatch
+
+  /** name.toString == "" or name.toString == "_" */
+  private def isWildcard(name: TypeName) = name.isEmpty || name == tpnme.WILDCARD
+
+  /** Construct a signature by prepending the signature names of the given `params`
+   *  to the parameter part of this signature.
+   */
+  def prepend(params: List[Type], isJava: Boolean)(implicit ctx: Context) =
+    Signature((params.map(sigName(_, isJava))) ++ paramsSig, resSig)
+
+  /** A signature is under-defined if its paramsSig part contains at least one
+   *  `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature
+   *  of a type that still contains uninstantiated type variables. They are eliminated
+   *  by `fixSignature` in `PostTyper`.
+   */
+  def isUnderDefined(implicit ctx: Context) =
+    paramsSig.contains(tpnme.Uninstantiated) || resSig == tpnme.Uninstantiated
+}
+
+object Signature {
+
+  /** How closely two signatures match; one of NoMatch, ParamMatch, FullMatch. */
+  type MatchDegree = Int
+  val NoMatch = 0
+  val ParamMatch = 1
+  val FullMatch = 2
+
+  /** The signature of everything that's not a method, i.e. that has
+   *  a type different from PolyType, MethodType, or ExprType.
+   */
+  val NotAMethod = Signature(List(), EmptyTypeName)
+
+  /** The signature of an overloaded denotation.
+   */
+  val OverloadedSignature = Signature(List(tpnme.OVERLOADED), EmptyTypeName)
+
+  /** The signature of a method with no parameters and result type `resultType`. */
+  def apply(resultType: Type, isJava: Boolean)(implicit ctx: Context): Signature = {
+    assert(!resultType.isInstanceOf[ExprType])
+    apply(Nil, sigName(resultType, isJava))
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala
new file mode 100644
index 000000000..c2a14b36f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala
@@ -0,0 +1,844 @@
+package dotty.tools.dotc
+package core
+
+import scala.language.implicitConversions
+import scala.collection.{mutable, immutable}
+import scala.annotation.switch
+import Names._
+import Symbols._
+import Contexts._
+import Decorators.StringDecorator
+import util.NameTransformer
+import scala.collection.breakOut
+
+object StdNames {
+
+/** Base strings from which synthetic names are derived. */
+
+ abstract class DefinedNames[N <: Name] {
+ protected implicit def fromString(s: String): N
+ protected def fromName(name: Name): N = fromString(name.toString)
+
+ private val kws = mutable.Set[N]()
+ protected def kw(name: N) = { kws += name; name }
+
+ final val keywords: collection.Set[N] = kws
+ }
+
+ abstract class ScalaNames[N <: Name] extends DefinedNames[N] {
+ protected def encode(s: String): N = fromName(fromString(s).encode)
+
+// Keywords, need to come first -----------------------
+
+ final val ABSTRACTkw: N = kw("abstract")
+ final val CASEkw: N = kw("case")
+ final val CLASSkw: N = kw("class")
+ final val CATCHkw: N = kw("catch")
+ final val DEFkw: N = kw("def")
+ final val DOkw: N = kw("do")
+ final val ELSEkw: N = kw("else")
+ final val EXTENDSkw: N = kw("extends")
+ final val FALSEkw: N = kw("false")
+ final val FINALkw: N = kw("final")
+ final val FINALLYkw: N = kw("finally")
+ final val FORkw: N = kw("for")
+ final val FORSOMEkw: N = kw("forSome")
+ final val IFkw: N = kw("if")
+ final val IMPLICITkw: N = kw("implicit")
+ final val IMPORTkw: N = kw("import")
+ final val INLINEkw: N = kw("inline")
+ final val LAZYkw: N = kw("lazy")
+ final val MACROkw: N = kw("macro")
+ final val MATCHkw: N = kw("match")
+ final val NEWkw: N = kw("new")
+ final val NULLkw: N = kw("null")
+ final val OBJECTkw: N = kw("object")
+ final val OVERRIDEkw: N = kw("override")
+ final val PACKAGEkw: N = kw("package")
+ final val PRIVATEkw: N = kw("private")
+ final val PROTECTEDkw: N = kw("protected")
+ final val RETURNkw: N = kw("return")
+ final val SEALEDkw: N = kw("sealed")
+ final val SUPERkw: N = kw("super")
+ final val THENkw: N = kw("then")
+ final val THISkw: N = kw("this")
+ final val THROWkw: N = kw("throw")
+ final val TRAITkw: N = kw("trait")
+ final val TRUEkw: N = kw("true")
+ final val TRYkw: N = kw("try")
+ final val TYPEkw: N = kw("type")
+ final val VALkw: N = kw("val")
+ final val VARkw: N = kw("var")
+ final val WITHkw: N = kw("with")
+ final val WHILEkw: N = kw("while")
+ final val YIELDkw: N = kw("yield")
+ final val DOTkw: N = kw(".")
+ final val USCOREkw: N = kw("_")
+ final val COLONkw: N = kw(":")
+ final val EQUALSkw: N = kw("=")
+ final val ARROWkw: N = kw("=>")
+ final val LARROWkw: N = kw("<-")
+ final val SUBTYPEkw: N = kw("<:")
+ final val VIEWBOUNDkw: N = kw("<%")
+ final val SUPERTYPEkw: N = kw(">:")
+ final val HASHkw: N = kw("#")
+ final val ATkw: N = kw("@")
+
+ val ANON_CLASS: N = "$anon"
+ val ANON_FUN: N = "$anonfun"
+ val BITMAP_PREFIX: N = "bitmap$"
+ val BITMAP_NORMAL: N = BITMAP_PREFIX // initialization bitmap for public/protected lazy vals
+ val BITMAP_TRANSIENT: N = BITMAP_PREFIX + "trans$" // initialization bitmap for transient lazy vals
+ val BITMAP_CHECKINIT: N = BITMAP_PREFIX + "init$" // initialization bitmap for checkinit values
+ val BITMAP_CHECKINIT_TRANSIENT: N = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values
+ val DEFAULT_GETTER: N = "$default$"
+ val DEFAULT_GETTER_INIT: N = NameTransformer.encode("<init>")
+ val DO_WHILE_PREFIX: N = "doWhile$"
+ val EMPTY: N = ""
+ val EMPTY_PACKAGE: N = Names.EMPTY_PACKAGE.toString
+ val EVIDENCE_PARAM_PREFIX: N = "evidence$"
+ val EXCEPTION_RESULT_PREFIX: N = "exceptionResult"
+ val EXPAND_SEPARATOR: N = "$$"
+ val IMPL_CLASS_SUFFIX: N = "$class"
+ val IMPORT: N = "<import>"
+ val INLINE_ACCESSOR_PREFIX = "$inlineAccessor$"
+ val INTERPRETER_IMPORT_WRAPPER: N = "$iw"
+ val INTERPRETER_LINE_PREFIX: N = "line"
+ val INTERPRETER_VAR_PREFIX: N = "res"
+ val INTERPRETER_WRAPPER_SUFFIX: N = "$object"
+ val LOCALDUMMY_PREFIX: N = "<local " // owner of local blocks
+ val MODULE_SUFFIX: N = NameTransformer.MODULE_SUFFIX_STRING
+ val AVOID_CLASH_SUFFIX: N = "$_avoid_name_clash_$"
+ val MODULE_VAR_SUFFIX: N = "$module"
+ val NAME_JOIN: N = NameTransformer.NAME_JOIN_STRING
+ val USCORE_PARAM_PREFIX: N = "_$"
+ val OPS_PACKAGE: N = "<special-ops>"
+ val OVERLOADED: N = "<overloaded>"
+ val PACKAGE: N = "package"
+ val PACKAGE_CLS: N = "package$"
+ val PROTECTED_PREFIX: N = "protected$"
+ val PROTECTED_SET_PREFIX: N = PROTECTED_PREFIX + "set"
+ val ROOT: N = "<root>"
+ val SHADOWED: N = "(shadowed)" // tag to be used until we have proper name kinds
+ val SINGLETON_SUFFIX: N = ".type"
+ val SPECIALIZED_SUFFIX: N = "$sp"
+ val SUPER_PREFIX: N = "super$"
+ val WHILE_PREFIX: N = "while$"
+ val DEFAULT_EXCEPTION_NAME: N = "ex$"
+ val INITIALIZER_PREFIX: N = "initial$"
+ val COMPANION_MODULE_METHOD: N = "companion$module"
+ val COMPANION_CLASS_METHOD: N = "companion$class"
+ val TRAIT_SETTER_SEPARATOR: N = "$_setter_$"
+
+ // value types (and AnyRef) are all used as terms as well
+ // as (at least) arguments to the @specialize annotation.
+ final val Boolean: N = "Boolean"
+ final val Byte: N = "Byte"
+ final val Char: N = "Char"
+ final val Double: N = "Double"
+ final val Float: N = "Float"
+ final val Int: N = "Int"
+ final val Long: N = "Long"
+ final val Short: N = "Short"
+ final val Unit: N = "Unit"
+
+ final val ScalaValueNames: scala.List[N] =
+ scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
+
+ // some types whose companions we utilize
+ final val AnyRef: N = "AnyRef"
+ final val Array: N = "Array"
+ final val List: N = "List"
+ final val Seq: N = "Seq"
+ final val Symbol: N = "Symbol"
+ final val ClassTag: N = "ClassTag"
+ final val classTag: N = "classTag"
+ final val WeakTypeTag: N = "WeakTypeTag"
+ final val TypeTag : N = "TypeTag"
+ final val typeTag: N = "typeTag"
+ final val Expr: N = "Expr"
+ final val String: N = "String"
+ final val Annotation: N = "Annotation"
+
+ // fictions we use as both types and terms
+ final val ERROR: N = "<error>"
+ final val ERRORenc: N = encode("<error>")
+ final val NO_NAME: N = "<none>" // formerly NOSYMBOL
+ final val WILDCARD: N = "_"
+
+// ----- Type names -----------------------------------------
+
+ final val BYNAME_PARAM_CLASS: N = "<byname>"
+ final val EQUALS_PATTERN: N = "<equals>"
+ final val LOCAL_CHILD: N = "<local child>"
+ final val REPEATED_PARAM_CLASS: N = "<repeated>"
+ final val WILDCARD_STAR: N = "_*"
+ final val REIFY_TREECREATOR_PREFIX: N = "$treecreator"
+ final val REIFY_TYPECREATOR_PREFIX: N = "$typecreator"
+
+ final val AbstractFunction: N = "AbstractFunction"
+ final val Any: N = "Any"
+ final val AnyVal: N = "AnyVal"
+ final val ExprApi: N = "ExprApi"
+ final val Function: N = "Function"
+ final val Mirror: N = "Mirror"
+ final val Nothing: N = "Nothing"
+ final val Null: N = "Null"
+ final val Object: N = "Object"
+ final val PartialFunction: N = "PartialFunction"
+ final val PrefixType: N = "PrefixType"
+ final val Product: N = "Product"
+ final val Serializable: N = "Serializable"
+ final val Singleton: N = "Singleton"
+ final val Throwable: N = "Throwable"
+ final val Tuple: N = "Tuple"
+
+ final val ClassfileAnnotation: N = "ClassfileAnnotation"
+ final val ClassManifest: N = "ClassManifest"
+ final val Enum: N = "Enum"
+ final val Group: N = "Group"
+ final val Tree: N = "Tree"
+ final val Type : N = "Type"
+ final val TypeTree: N = "TypeTree"
+
+ // Annotation simple names, used in Namer
+ final val BeanPropertyAnnot: N = "BeanProperty"
+ final val BooleanBeanPropertyAnnot: N = "BooleanBeanProperty"
+ final val bridgeAnnot: N = "bridge"
+
+ // Classfile Attributes
+ final val AnnotationDefaultATTR: N = "AnnotationDefault"
+ final val BridgeATTR: N = "Bridge"
+ final val ClassfileAnnotationATTR: N = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
+ final val CodeATTR: N = "Code"
+ final val ConstantValueATTR: N = "ConstantValue"
+ final val DeprecatedATTR: N = "Deprecated"
+ final val ExceptionsATTR: N = "Exceptions"
+ final val InnerClassesATTR: N = "InnerClasses"
+ final val LineNumberTableATTR: N = "LineNumberTable"
+ final val LocalVariableTableATTR: N = "LocalVariableTable"
+ final val RuntimeAnnotationATTR: N = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
+ final val RuntimeParamAnnotationATTR: N = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
+ final val ScalaATTR: N = "Scala"
+ final val ScalaSignatureATTR: N = "ScalaSig"
+ final val TASTYATTR: N = "TASTY"
+ final val SignatureATTR: N = "Signature"
+ final val SourceFileATTR: N = "SourceFile"
+ final val SyntheticATTR: N = "Synthetic"
+
+// ----- Term names -----------------------------------------
+
+ // Compiler-internal
+ val ANYname: N = "<anyname>"
+ val CONSTRUCTOR: N = Names.CONSTRUCTOR.toString
+ val DEFAULT_CASE: N = "defaultCase$"
+ val EVT2U: N = "evt2u$"
+ val EQEQ_LOCAL_VAR: N = "eqEqTemp$"
+ val FAKE_LOCAL_THIS: N = "this$"
+ val LAZY_LOCAL: N = "$lzy"
+ val LAZY_LOCAL_INIT: N = "$lzyINIT"
+ val LAZY_FIELD_OFFSET: N = "OFFSET$"
+ val LAZY_SLOW_SUFFIX: N = "$lzycompute"
+ val LOCAL_SUFFIX: N = "$$local"
+ val UNIVERSE_BUILD_PREFIX: N = "$u.build."
+ val UNIVERSE_BUILD: N = "$u.build"
+ val UNIVERSE_PREFIX: N = "$u."
+ val UNIVERSE_SHORT: N = "$u"
+ val MIRROR_PREFIX: N = "$m."
+ val MIRROR_SHORT: N = "$m"
+ val MIRROR_UNTYPED: N = "$m$untyped"
+ val REIFY_FREE_PREFIX: N = "free$"
+ val REIFY_FREE_THIS_SUFFIX: N = "$this"
+ val REIFY_FREE_VALUE_SUFFIX: N = "$value"
+ val REIFY_SYMDEF_PREFIX: N = "symdef$"
+ val MODULE_INSTANCE_FIELD: N = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
+ val OUTER: N = "$outer"
+ val OUTER_LOCAL: N = "$outer "
+ val OUTER_SELECT: N = "_<outer>" // emitted by inliner, replaced by outer path in explicitouter
+ val REFINE_CLASS: N = "<refinement>"
+ val ROOTPKG: N = "_root_"
+ val SELECTOR_DUMMY: N = "<unapply-selector>"
+ val SELF: N = "$this"
+ val SETTER_SUFFIX: N = encode("_=")
+ val SKOLEM: N = "<skolem>"
+ val SPECIALIZED_INSTANCE: N = "specInstance$"
+ val THIS: N = "_$this"
+ val TRAIT_CONSTRUCTOR: N = "$init$"
+ val U2EVT: N = "u2evt$"
+
+ final val Nil: N = "Nil"
+ final val Predef: N = "Predef"
+ final val ScalaRunTime: N = "ScalaRunTime"
+ final val Some: N = "Some"
+
+ val x_0 : N = "x$0"
+ val x_1 : N = "x$1"
+ val x_2 : N = "x$2"
+ val x_3 : N = "x$3"
+ val x_4 : N = "x$4"
+ val x_5 : N = "x$5"
+ val x_6 : N = "x$6"
+ val x_7 : N = "x$7"
+ val x_8 : N = "x$8"
+ val x_9 : N = "x$9"
+ val _1 : N = "_1"
+ val _2 : N = "_2"
+ val _3 : N = "_3"
+ val _4 : N = "_4"
+ val _5 : N = "_5"
+ val _6 : N = "_6"
+ val _7 : N = "_7"
+ val _8 : N = "_8"
+ val _9 : N = "_9"
+ val _10 : N = "_10"
+ val _11 : N = "_11"
+ val _12 : N = "_12"
+ val _13 : N = "_13"
+ val _14 : N = "_14"
+ val _15 : N = "_15"
+ val _16 : N = "_16"
+ val _17 : N = "_17"
+ val _18 : N = "_18"
+ val _19 : N = "_19"
+ val _20 : N = "_20"
+ val _21 : N = "_21"
+ val _22 : N = "_22"
+
+ val ??? = encode("???")
+
+ val genericWrapArray: N = "genericWrapArray"
+ def wrapRefArray: N = "wrapRefArray"
+ def wrapXArray(clsName: Name): N = "wrap" + clsName + "Array"
+
+ // Compiler utilized names
+
+ val AnnotatedType: N = "AnnotatedType"
+ val AppliedTypeTree: N = "AppliedTypeTree"
+ val ArrayAnnotArg: N = "ArrayAnnotArg"
+ val Constant: N = "Constant"
+ val ConstantType: N = "ConstantType"
+ val ExistentialTypeTree: N = "ExistentialTypeTree"
+ val Flag : N = "Flag"
+ val Ident: N = "Ident"
+ val Import: N = "Import"
+ val Literal: N = "Literal"
+ val LiteralAnnotArg: N = "LiteralAnnotArg"
+ val Modifiers: N = "Modifiers"
+ val NestedAnnotArg: N = "NestedAnnotArg"
+ val NoFlags: N = "NoFlags"
+ val NoPrefix: N = "NoPrefix"
+ val NoSymbol: N = "NoSymbol"
+ val NoType: N = "NoType"
+ val Pair: N = "Pair"
+ val Ref: N = "Ref"
+ val RootPackage: N = "RootPackage"
+ val RootClass: N = "RootClass"
+ val Scala2: N = "Scala2"
+ val Select: N = "Select"
+ val StringContext: N = "StringContext"
+ val This: N = "This"
+ val ThisType: N = "ThisType"
+ val Tuple2: N = "Tuple2"
+ val TYPE_ : N = "TYPE"
+ val TypeApply: N = "TypeApply"
+ val TypeRef: N = "TypeRef"
+ val UNIT : N = "UNIT"
+ val add_ : N = "add"
+ val annotation: N = "annotation"
+ val anyValClass: N = "anyValClass"
+ val append: N = "append"
+ val apply: N = "apply"
+ val applyDynamic: N = "applyDynamic"
+ val applyDynamicNamed: N = "applyDynamicNamed"
+ val applyOrElse: N = "applyOrElse"
+ val args : N = "args"
+ val argv : N = "argv"
+ val arrayClass: N = "arrayClass"
+ val arrayElementClass: N = "arrayElementClass"
+ val arrayValue: N = "arrayValue"
+ val array_apply : N = "array_apply"
+ val array_clone : N = "array_clone"
+ val array_length : N = "array_length"
+ val array_update : N = "array_update"
+ val arraycopy: N = "arraycopy"
+ val asTerm: N = "asTerm"
+ val asModule: N = "asModule"
+ val asMethod: N = "asMethod"
+ val asType: N = "asType"
+ val asClass: N = "asClass"
+ val asInstanceOf_ : N = "asInstanceOf"
+ val assert_ : N = "assert"
+ val assume_ : N = "assume"
+ val box: N = "box"
+ val build : N = "build"
+ val bytes: N = "bytes"
+ val canEqual_ : N = "canEqual"
+ val checkInitialized: N = "checkInitialized"
+ val ClassManifestFactory: N = "ClassManifestFactory"
+ val classOf: N = "classOf"
+ val clone_ : N = "clone"
+ // val conforms : N = "conforms" // Dotty deviation: no special treatment of conforms, so the occurrence of the name here would cause to unintended implicit shadowing. Should find a less common name for it in Predef.
+ val copy: N = "copy"
+ val currentMirror: N = "currentMirror"
+ val create: N = "create"
+ val definitions: N = "definitions"
+ val delayedInit: N = "delayedInit"
+ val delayedInitArg: N = "delayedInit$body"
+ val drop: N = "drop"
+ val dynamics: N = "dynamics"
+ val dummyApply: N = "<dummy-apply>"
+ val elem: N = "elem"
+ val emptyValDef: N = "emptyValDef"
+ val ensureAccessible : N = "ensureAccessible"
+ val eq: N = "eq"
+ val equalsNumChar : N = "equalsNumChar"
+ val equalsNumNum : N = "equalsNumNum"
+ val equalsNumObject : N = "equalsNumObject"
+ val equals_ : N = "equals"
+ val error: N = "error"
+ val eval: N = "eval"
+ val eqAny: N = "eqAny"
+ val ex: N = "ex"
+ val experimental: N = "experimental"
+ val f: N = "f"
+ val false_ : N = "false"
+ val filter: N = "filter"
+ val finalize_ : N = "finalize"
+ val find_ : N = "find"
+ val flagsFromBits : N = "flagsFromBits"
+ val flatMap: N = "flatMap"
+ val foreach: N = "foreach"
+ val genericArrayOps: N = "genericArrayOps"
+ val get: N = "get"
+ val getClass_ : N = "getClass"
+ val getOrElse: N = "getOrElse"
+ val hasNext: N = "hasNext"
+ val hashCode_ : N = "hashCode"
+ val hash_ : N = "hash"
+ val head: N = "head"
+ val higherKinds: N = "higherKinds"
+ val identity: N = "identity"
+ val implicitly: N = "implicitly"
+ val in: N = "in"
+ val info: N = "info"
+ val inlinedEquals: N = "inlinedEquals"
+ val isArray: N = "isArray"
+ val isDefined: N = "isDefined"
+ val isDefinedAt: N = "isDefinedAt"
+ val isDefinedAtImpl: N = "$isDefinedAt"
+ val isEmpty: N = "isEmpty"
+ val isInstanceOf_ : N = "isInstanceOf"
+ val java: N = "java"
+ val key: N = "key"
+ val lang: N = "lang"
+ val length: N = "length"
+ val lengthCompare: N = "lengthCompare"
+ val liftedTree: N = "liftedTree"
+ val `macro` : N = "macro"
+ val macroThis : N = "_this"
+ val macroContext : N = "c"
+ val main: N = "main"
+ val manifest: N = "manifest"
+ val ManifestFactory: N = "ManifestFactory"
+ val manifestToTypeTag: N = "manifestToTypeTag"
+ val map: N = "map"
+ val materializeClassTag: N = "materializeClassTag"
+ val materializeWeakTypeTag: N = "materializeWeakTypeTag"
+ val materializeTypeTag: N = "materializeTypeTag"
+ val mirror : N = "mirror"
+ val moduleClass : N = "moduleClass"
+ val name: N = "name"
+ val ne: N = "ne"
+ val newFreeTerm: N = "newFreeTerm"
+ val newFreeType: N = "newFreeType"
+ val newNestedSymbol: N = "newNestedSymbol"
+ val newScopeWith: N = "newScopeWith"
+ val next: N = "next"
+ val nmeNewTermName: N = "newTermName"
+ val nmeNewTypeName: N = "newTypeName"
+ val noAutoTupling: N = "noAutoTupling"
+ val normalize: N = "normalize"
+ val notifyAll_ : N = "notifyAll"
+ val notify_ : N = "notify"
+ val null_ : N = "null"
+ val ofDim: N = "ofDim"
+ val origin: N = "origin"
+ val prefix : N = "prefix"
+ val productArity: N = "productArity"
+ val productElement: N = "productElement"
+ val productIterator: N = "productIterator"
+ val productPrefix: N = "productPrefix"
+ val readResolve: N = "readResolve"
+ val reflect : N = "reflect"
+ val reify : N = "reify"
+ val rootMirror : N = "rootMirror"
+ val runOrElse: N = "runOrElse"
+ val runtime: N = "runtime"
+ val runtimeClass: N = "runtimeClass"
+ val runtimeMirror: N = "runtimeMirror"
+ val sameElements: N = "sameElements"
+ val scala_ : N = "scala"
+ val selectDynamic: N = "selectDynamic"
+ val selectOverloadedMethod: N = "selectOverloadedMethod"
+ val selectTerm: N = "selectTerm"
+ val selectType: N = "selectType"
+ val self: N = "self"
+ val seqToArray: N = "seqToArray"
+ val setAccessible: N = "setAccessible"
+ val setAnnotations: N = "setAnnotations"
+ val setSymbol: N = "setSymbol"
+ val setType: N = "setType"
+ val setTypeSignature: N = "setTypeSignature"
+ val splice: N = "splice"
+ val staticClass : N = "staticClass"
+ val staticModule : N = "staticModule"
+ val staticPackage : N = "staticPackage"
+ val synchronized_ : N = "synchronized"
+ val tail: N = "tail"
+ val `then` : N = "then"
+ val this_ : N = "this"
+ val thisPrefix : N = "thisPrefix"
+ val throw_ : N = "throw"
+ val toArray: N = "toArray"
+ val toList: N = "toList"
+ val toObjectArray : N = "toObjectArray"
+ val toSeq: N = "toSeq"
+ val toString_ : N = "toString"
+ val toTypeConstructor: N = "toTypeConstructor"
+ val tpe : N = "tpe"
+ val tree : N = "tree"
+ val true_ : N = "true"
+ val typedProductIterator: N = "typedProductIterator"
+ val typeTagToManifest: N = "typeTagToManifest"
+ val unapply: N = "unapply"
+ val unapplySeq: N = "unapplySeq"
+ val unbox: N = "unbox"
+ val universe: N = "universe"
+ val update: N = "update"
+ val updateDynamic: N = "updateDynamic"
+ val value: N = "value"
+ val valueOf : N = "valueOf"
+ val values : N = "values"
+ val view_ : N = "view"
+ val wait_ : N = "wait"
+ val withFilter: N = "withFilter"
+ val withFilterIfRefutable: N = "withFilterIfRefutable$"
+ val wrap: N = "wrap"
+ val zero: N = "zero"
+ val zip: N = "zip"
+ val nothingRuntimeClass: N = "scala.runtime.Nothing$"
+ val nullRuntimeClass: N = "scala.runtime.Null$"
+
+ val synthSwitch: N = "$synthSwitch"
+
+ // unencoded operators
+ object raw {
+ final val AMP : N = "&"
+ final val BANG : N = "!"
+ final val BAR : N = "|"
+ final val DOLLAR: N = "$"
+ final val GE: N = ">="
+ final val LE: N = "<="
+ final val MINUS: N = "-"
+ final val NE: N = "!="
+ final val PLUS : N = "+"
+ final val SLASH: N = "/"
+ final val STAR : N = "*"
+ final val TILDE: N = "~"
+
+ final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG)
+ }
+
+ object specializedTypeNames {
+ final val Boolean: N = "Z"
+ final val Byte: N = "B"
+ final val Char: N = "C"
+ final val Short: N = "S"
+ final val Int: N = "I"
+ final val Long: N = "J"
+ final val Float: N = "F"
+ final val Double: N = "D"
+ final val Void: N = "V"
+ final val Object: N = "L"
+
+ final val prefix: N = "$m"
+ final val separator: N = "c"
+ final val suffix: N = "$sp"
+ }
+
+ // value-conversion methods
+ val toByte: N = "toByte"
+ val toShort: N = "toShort"
+ val toChar: N = "toChar"
+ val toInt: N = "toInt"
+ val toLong: N = "toLong"
+ val toFloat: N = "toFloat"
+ val toDouble: N = "toDouble"
+
+ // primitive operation methods for structural types mostly
+ // overlap with the above, but not for these two.
+ val toCharacter: N = "toCharacter"
+ val toInteger: N = "toInteger"
+
+ def newLazyValSlowComputeName(lzyValName: N) = lzyValName ++ LAZY_SLOW_SUFFIX
+
+ // ASCII names for operators
+ val ADD = encode("+")
+ val AND = encode("&")
+ val ASR = encode(">>")
+ val DIV = encode("/")
+ val EQ = encode("==")
+ val EQL = encode("=")
+ val GE = encode(">=")
+ val GT = encode(">")
+ val HASHHASH = encode("##")
+ val LE = encode("<=")
+ val LSL = encode("<<")
+ val LSR = encode(">>>")
+ val LT = encode("<")
+ val MINUS = encode("-")
+ val MOD = encode("%")
+ val MUL = encode("*")
+ val NE = encode("!=")
+ val OR = encode("|")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val SUB = MINUS // ... as does SUB with PLUS
+ val XOR = encode("^")
+ val ZAND = encode("&&")
+ val ZOR = encode("||")
+
+ // unary operators
+ val UNARY_PREFIX: N = "unary_"
+ val UNARY_~ = encode("unary_~")
+ val UNARY_+ = encode("unary_+")
+ val UNARY_- = encode("unary_-")
+ val UNARY_! = encode("unary_!")
+
+ // Grouped here so Cleanup knows what tests to perform.
+ val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
+ val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
+ val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
+ val NumberOpNames = (
+ Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
+ ++ Set(UNARY_+, UNARY_-, UNARY_!)
+ ++ ConversionNames
+ ++ CommonOpNames
+ )
+
+ val add: N = "add"
+ val complement: N = "complement"
+ val divide: N = "divide"
+ val multiply: N = "multiply"
+ val negate: N = "negate"
+ val positive: N = "positive"
+ val shiftLogicalRight: N = "shiftLogicalRight"
+ val shiftSignedLeft: N = "shiftSignedLeft"
+ val shiftSignedRight: N = "shiftSignedRight"
+ val subtract: N = "subtract"
+ val takeAnd: N = "takeAnd"
+ val takeConditionalAnd: N = "takeConditionalAnd"
+ val takeConditionalOr: N = "takeConditionalOr"
+ val takeModulo: N = "takeModulo"
+ val takeNot: N = "takeNot"
+ val takeOr: N = "takeOr"
+ val takeXor: N = "takeXor"
+ val testEqual: N = "testEqual"
+ val testGreaterOrEqualThan: N = "testGreaterOrEqualThan"
+ val testGreaterThan: N = "testGreaterThan"
+ val testLessOrEqualThan: N = "testLessOrEqualThan"
+ val testLessThan: N = "testLessThan"
+ val testNotEqual: N = "testNotEqual"
+
+ val isBoxedNumberOrBoolean: N = "isBoxedNumberOrBoolean"
+ val isBoxedNumber: N = "isBoxedNumber"
+
+ val reflPolyCacheName: N = "reflPoly$Cache"
+ val reflClassCacheName: N = "reflClass$Cache"
+ val reflParamsCacheName: N = "reflParams$Cache"
+ val reflMethodCacheName: N = "reflMethod$Cache"
+ val reflMethodName: N = "reflMethod$Method"
+
+ private val reflectionCacheNames = Set[N](
+ reflPolyCacheName,
+ reflClassCacheName,
+ reflParamsCacheName,
+ reflMethodCacheName,
+ reflMethodName
+ )
+
+ def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
+ }
+
+ class ScalaTermNames extends ScalaNames[TermName] {
+ protected implicit def fromString(s: String): TermName = termName(s)
+
+ @switch def syntheticParamName(i: Int): TermName = i match {
+ case 0 => x_0
+ case 1 => x_1
+ case 2 => x_2
+ case 3 => x_3
+ case 4 => x_4
+ case 5 => x_5
+ case 6 => x_6
+ case 7 => x_7
+ case 8 => x_8
+ case 9 => x_9
+ case _ => termName("x$" + i)
+ }
+
+ @switch def productAccessorName(j: Int): TermName = j match {
+ case 1 => nme._1
+ case 2 => nme._2
+ case 3 => nme._3
+ case 4 => nme._4
+ case 5 => nme._5
+ case 6 => nme._6
+ case 7 => nme._7
+ case 8 => nme._8
+ case 9 => nme._9
+ case 10 => nme._10
+ case 11 => nme._11
+ case 12 => nme._12
+ case 13 => nme._13
+ case 14 => nme._14
+ case 15 => nme._15
+ case 16 => nme._16
+ case 17 => nme._17
+ case 18 => nme._18
+ case 19 => nme._19
+ case 20 => nme._20
+ case 21 => nme._21
+ case 22 => nme._22
+ case _ => termName("_" + j)
+ }
+
+ def syntheticParamNames(num: Int): List[TermName] =
+ (0 until num).map(syntheticParamName)(breakOut)
+
+ def localDummyName(clazz: Symbol)(implicit ctx: Context): TermName =
+ LOCALDUMMY_PREFIX ++ clazz.name ++ ">"
+
+ def newBitmapName(bitmapPrefix: TermName, n: Int): TermName = bitmapPrefix ++ n.toString
+
+ def selectorName(n: Int): TermName = "_" + (n + 1)
+
+ object primitive {
+ val arrayApply: TermName = "[]apply"
+ val arrayUpdate: TermName = "[]update"
+ val arrayLength: TermName = "[]length"
+ val names: Set[Name] = Set(arrayApply, arrayUpdate, arrayLength)
+ }
+
+ def isPrimitiveName(name: Name) = primitive.names.contains(name)
+ }
+
+ class ScalaTypeNames extends ScalaNames[TypeName] {
+ protected implicit def fromString(s: String): TypeName = typeName(s)
+
+ def syntheticTypeParamName(i: Int): TypeName = "X" + i
+
+ def syntheticTypeParamNames(num: Int): List[TypeName] =
+ (0 until num).map(syntheticTypeParamName)(breakOut)
+
+ final val Conforms = encode("<:<")
+
+ final val Uninstantiated: TypeName = "?$"
+ }
+
  /** Java keywords and commonly-referenced Java class, method, and annotation
   *  names, parameterized over the name kind `N`.
   */
  abstract class JavaNames[N <: Name] extends DefinedNames[N] {
    final val ABSTRACTkw: N = kw("abstract")
    final val ASSERTkw: N = kw("assert")
    final val BOOLEANkw: N = kw("boolean")
    final val BREAKkw: N = kw("break")
    final val BYTEkw: N = kw("byte")
    final val CASEkw: N = kw("case")
    final val CATCHkw: N = kw("catch")
    final val CHARkw: N = kw("char")
    final val CLASSkw: N = kw("class")
    final val CONSTkw: N = kw("const")
    final val CONTINUEkw: N = kw("continue")
    final val DEFAULTkw: N = kw("default")
    final val DOkw: N = kw("do")
    final val DOUBLEkw: N = kw("double")
    final val ELSEkw: N = kw("else")
    final val ENUMkw: N = kw("enum")
    final val EXTENDSkw: N = kw("extends")
    final val FINALkw: N = kw("final")
    final val FINALLYkw: N = kw("finally")
    final val FLOATkw: N = kw("float")
    final val FORkw: N = kw("for")
    final val IFkw: N = kw("if")
    final val GOTOkw: N = kw("goto")
    final val IMPLEMENTSkw: N = kw("implements")
    final val IMPORTkw: N = kw("import")
    final val INSTANCEOFkw: N = kw("instanceof")
    final val INTkw: N = kw("int")
    final val INTERFACEkw: N = kw("interface")
    final val LONGkw: N = kw("long")
    final val NATIVEkw: N = kw("native")
    final val NEWkw: N = kw("new")
    final val PACKAGEkw: N = kw("package")
    final val PRIVATEkw: N = kw("private")
    final val PROTECTEDkw: N = kw("protected")
    final val PUBLICkw: N = kw("public")
    final val RETURNkw: N = kw("return")
    final val SHORTkw: N = kw("short")
    final val STATICkw: N = kw("static")
    final val STRICTFPkw: N = kw("strictfp")
    final val SUPERkw: N = kw("super")
    final val SWITCHkw: N = kw("switch")
    final val SYNCHRONIZEDkw: N = kw("synchronized")
    final val THISkw: N = kw("this")
    final val THROWkw: N = kw("throw")
    final val THROWSkw: N = kw("throws")
    final val TRANSIENTkw: N = kw("transient")
    final val TRYkw: N = kw("try")
    final val VOIDkw: N = kw("void")
    final val VOLATILEkw: N = kw("volatile")
    final val WHILEkw: N = kw("while")

    // Fully-qualified names of the boxed primitive classes and other
    // frequently-referenced java.lang / java.lang.reflect classes.
    final val BoxedBoolean: N = "java.lang.Boolean"
    final val BoxedByte: N = "java.lang.Byte"
    final val BoxedCharacter: N = "java.lang.Character"
    final val BoxedDouble: N = "java.lang.Double"
    final val BoxedFloat: N = "java.lang.Float"
    final val BoxedInteger: N = "java.lang.Integer"
    final val BoxedLong: N = "java.lang.Long"
    final val BoxedNumber: N = "java.lang.Number"
    final val BoxedShort: N = "java.lang.Short"
    final val Class: N = "java.lang.Class"
    final val IOOBException: N = "java.lang.IndexOutOfBoundsException"
    final val InvTargetException: N = "java.lang.reflect.InvocationTargetException"
    final val MethodAsObject: N = "java.lang.reflect.Method"
    final val NPException: N = "java.lang.NullPointerException"
    final val Object: N = "java.lang.Object"
    final val String: N = "java.lang.String"
    final val Throwable: N = "java.lang.Throwable"

    // Method names used via reflection.
    final val ForName: N = "forName"
    final val GetCause: N = "getCause"
    final val GetClass: N = "getClass"
    final val GetClassLoader: N = "getClassLoader"
    final val GetComponentType: N = "getComponentType"
    final val GetMethod: N = "getMethod"
    final val Invoke: N = "invoke"
    final val JavaLang: N = "java.lang"

    final val BeanProperty: N = "scala.beans.BeanProperty"
    final val BooleanBeanProperty: N = "scala.beans.BooleanBeanProperty"
    final val JavaSerializable: N = "java.io.Serializable"
  }
+
  /** Java names as term names. */
  class JavaTermNames extends JavaNames[TermName] {
    protected def fromString(s: String): TermName = termName(s)
  }
  /** Java names as type names. */
  class JavaTypeNames extends JavaNames[TypeName] {
    protected def fromString(s: String): TypeName = typeName(s)
  }
+
  val nme = new ScalaTermNames    // Scala term names
  val tpnme = new ScalaTypeNames  // Scala type names
  val jnme = new JavaTermNames    // Java term names
  val jtpnme = new JavaTypeNames  // Java type names
+
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala
new file mode 100644
index 000000000..23683608a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala
@@ -0,0 +1,306 @@
+package dotty.tools.dotc.core
+
+import Types._, Symbols._, Contexts._, Names._
+
+/** Substitution operations on types. See the corresponding `subst` and
+ * `substThis` methods on class Type for an explanation.
+ */
+trait Substituters { this: Context =>
+
+ final def subst(tp: Type, from: BindingType, to: BindingType, theMap: SubstBindingMap): Type =
+ tp match {
+ case tp: BoundType =>
+ if (tp.binder eq from) tp.copyBoundType(to.asInstanceOf[tp.BT]) else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(subst(tp.prefix, from, to, theMap))
+ case _: ThisType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst(tp.parent, from, to, theMap), tp.refinedName, subst(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstBindingMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (sym eq from) return to
+ if (sym.isStatic && !from.isStatic) tp
+ else tp.derivedSelect(subst1(tp.prefix, from, to, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst1(tp.parent, from, to, theMap), tp.refinedName, subst1(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst1(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new Subst1Map(from, to))
+ .mapOver(tp)
+ }
+ }
+
+ final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (sym eq from1) return to1
+ if (sym eq from2) return to2
+ if (sym.isStatic && !from1.isStatic && !from2.isStatic) tp
+ else tp.derivedSelect(subst2(tp.prefix, from1, to1, from2, to2, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst2(tp.parent, from1, to1, from2, to2, theMap), tp.refinedName, subst2(tp.refinedInfo, from1, to1, from2, to2, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst2(tp.alias, from1, to1, from2, to2, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new Subst2Map(from1, to1, from2, to2))
+ .mapOver(tp)
+ }
+ }
+
+ final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym) return ts.head
+ fs = fs.tail
+ ts = ts.tail
+ }
+ if (sym.isStatic && !existsStatic(from)) tp
+ else tp.derivedSelect(subst(tp.prefix, from, to, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst(tp.parent, from, to, theMap), tp.refinedName, subst(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstMap(from, to))
+ .mapOver(tp)
+ }
+ }
+
+ final def substDealias(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstDealiasMap): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym) return ts.head
+ fs = fs.tail
+ ts = ts.tail
+ }
+ if (sym.isStatic && !existsStatic(from)) tp
+ else {
+ tp.info match {
+ case TypeAlias(alias) =>
+ val alias1 = substDealias(alias, from, to, theMap)
+ if (alias1 ne alias) return alias1
+ case _ =>
+ }
+ tp.derivedSelect(substDealias(tp.prefix, from, to, theMap))
+ }
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substDealias(tp.parent, from, to, theMap), tp.refinedName, substDealias(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substDealias(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstDealiasMap(from, to))
+ .mapOver(tp)
+ }
+ }
+
+ final def substSym(tp: Type, from: List[Symbol], to: List[Symbol], theMap: SubstSymMap): Type =
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym)
+ return tp match {
+ case tp: WithFixedSym => NamedType.withFixedSym(tp.prefix, ts.head)
+ case _ => substSym(tp.prefix, from, to, theMap) select ts.head
+ }
+ fs = fs.tail
+ ts = ts.tail
+ }
+ if (sym.isStatic && !existsStatic(from)) tp
+ else tp.derivedSelect(substSym(tp.prefix, from, to, theMap))
+ case tp: ThisType =>
+ val sym = tp.cls
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym) return ts.head.asClass.thisType
+ fs = fs.tail
+ ts = ts.tail
+ }
+ tp
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substSym(tp.parent, from, to, theMap), tp.refinedName, substSym(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substSym(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstSymMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def substThis(tp: Type, from: ClassSymbol, to: Type, theMap: SubstThisMap): Type =
+ tp match {
+ case tp: ThisType =>
+ if (tp.cls eq from) to else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStaticOwner) tp
+ else tp.derivedSelect(substThis(tp.prefix, from, to, theMap))
+ case _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substThis(tp.parent, from, to, theMap), tp.refinedName, substThis(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substThis(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstThisMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap): Type =
+ tp match {
+ case tp @ RecThis(binder) =>
+ if (binder eq from) to else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(substRecThis(tp.prefix, from, to, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substRecThis(tp.parent, from, to, theMap), tp.refinedName, substRecThis(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substRecThis(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstRecThisMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def substParam(tp: Type, from: ParamType, to: Type, theMap: SubstParamMap): Type =
+ tp match {
+ case tp: BoundType =>
+ if (tp == from) to else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(substParam(tp.prefix, from, to, theMap))
+ case _: ThisType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substParam(tp.parent, from, to, theMap), tp.refinedName, substParam(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substParam(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstParamMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def substParams(tp: Type, from: BindingType, to: List[Type], theMap: SubstParamsMap): Type =
+ tp match {
+ case tp: ParamType =>
+ if (tp.binder == from) to(tp.paramNum) else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(substParams(tp.prefix, from, to, theMap))
+ case _: ThisType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substParams(tp.parent, from, to, theMap), tp.refinedName, substParams(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substParams(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstParamsMap(from, to))
+ .mapOver(tp)
+ }
+
+ private def existsStatic(syms: List[Symbol]): Boolean = syms match {
+ case sym :: syms1 => sym.isStatic || existsStatic(syms1)
+ case nil => false
+ }
+
+ final class SubstBindingMap(from: BindingType, to: BindingType) extends DeepTypeMap {
+ def apply(tp: Type) = subst(tp, from, to, this)
+ }
+
+ final class Subst1Map(from: Symbol, to: Type) extends DeepTypeMap {
+ def apply(tp: Type) = subst1(tp, from, to, this)
+ }
+
+ final class Subst2Map(from1: Symbol, to1: Type, from2: Symbol, to2: Type) extends DeepTypeMap {
+ def apply(tp: Type) = subst2(tp, from1, to1, from2, to2, this)
+ }
+
+ final class SubstMap(from: List[Symbol], to: List[Type]) extends DeepTypeMap {
+ def apply(tp: Type): Type = subst(tp, from, to, this)
+ }
+
+ final class SubstDealiasMap(from: List[Symbol], to: List[Type]) extends DeepTypeMap {
+ override def apply(tp: Type): Type = substDealias(tp, from, to, this)
+ }
+
+ final class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends DeepTypeMap {
+ def apply(tp: Type): Type = substSym(tp, from, to, this)
+ }
+
+ final class SubstThisMap(from: ClassSymbol, to: Type) extends DeepTypeMap {
+ def apply(tp: Type): Type = substThis(tp, from, to, this)
+ }
+
+ final class SubstRecThisMap(from: Type, to: Type) extends DeepTypeMap {
+ def apply(tp: Type): Type = substRecThis(tp, from, to, this)
+ }
+
+ final class SubstParamMap(from: ParamType, to: Type) extends DeepTypeMap {
+ def apply(tp: Type) = substParam(tp, from, to, this)
+ }
+
+ final class SubstParamsMap(from: BindingType, to: List[Type]) extends DeepTypeMap {
+ def apply(tp: Type) = substParams(tp, from, to, this)
+ }
+
+ /** A map for "cycle safe substitutions" which do not force the denotation
+ * of a TypeRef unless the name matches up with one of the substituted symbols.
+ */
+ final class SafeSubstMap(from: List[Symbol], to: List[Type]) extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case tp: NamedType =>
+ try {
+ var sym: Symbol = null
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head.name == tp.name) {
+ if (sym == null) sym = tp.symbol
+ if (fs.head eq sym) return ts.head
+ }
+ fs = fs.tail
+ ts = ts.tail
+ }
+ tp.newLikeThis(apply(tp.prefix))
+ }
+ catch {
+ case ex: CyclicReference => tp.derivedSelect(apply(tp.prefix))
+ }
+ case _ => mapOver(tp)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
new file mode 100644
index 000000000..8b7c28e19
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -0,0 +1,2004 @@
+package dotty.tools
+package dotc
+package core
+
+import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._
+import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._, Comments._
+import NameOps._
+import Scopes.Scope
+import collection.mutable
+import collection.immutable.BitSet
+import scala.reflect.io.AbstractFile
+import Decorators.SymbolIteratorDecorator
+import ast._
+import annotation.tailrec
+import CheckRealizable._
+import util.SimpleMap
+import util.Stats
+import config.Config
+import config.Printers.{completions, incremental, noPrinter}
+
+trait SymDenotations { this: Context =>
+ import SymDenotations._
+
+ /** Factory method for SymDenotation creation. All creations
+ * should be done via this method.
+ */
+ def SymDenotation(
+ symbol: Symbol,
+ owner: Symbol,
+ name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol = NoSymbol)(implicit ctx: Context): SymDenotation = {
+ val result =
+ if (symbol.isClass)
+ if (initFlags is Package) new PackageClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin, ctx.runId)
+ else new ClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin, ctx.runId)
+ else new SymDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin)
+ result.validFor = stablePeriod
+ result
+ }
+
+ def stillValid(denot: SymDenotation): Boolean =
+ if (denot.is(ValidForever) || denot.isRefinementClass || denot.isImport) true
+ else {
+ val initial = denot.initial
+ val firstPhaseId = initial.validFor.firstPhaseId.max(ctx.typerPhase.id)
+ if ((initial ne denot) || ctx.phaseId != firstPhaseId)
+ ctx.withPhase(firstPhaseId).stillValidInOwner(initial)
+ else
+ stillValidInOwner(denot)
+ }
+
+ private[SymDenotations] def stillValidInOwner(denot: SymDenotation): Boolean = try {
+ val owner = denot.owner.denot
+ stillValid(owner) && (
+ !owner.isClass
+ || owner.isRefinementClass
+ || owner.is(Scala2x)
+ || (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol)
+ || denot.isSelfSym)
+ } catch {
+ case ex: StaleSymbol => false
+ }
+
+ /** Explain why symbol is invalid; used for debugging only */
+ def traceInvalid(denot: Denotation): Boolean = {
+ def show(d: Denotation) = s"$d#${d.symbol.id}"
+ def explain(msg: String) = {
+ println(s"${show(denot)} is invalid at ${this.period} because $msg")
+ false
+ }
+ denot match {
+ case denot: SymDenotation =>
+ def explainSym(msg: String) = explain(s"$msg\n defined = ${denot.definedPeriodsString}")
+ if (denot.is(ValidForever) || denot.isRefinementClass) true
+ else {
+ implicit val ctx: Context = this
+ val initial = denot.initial
+ if ((initial ne denot) || ctx.phaseId != initial.validFor.firstPhaseId) {
+ ctx.withPhase(initial.validFor.firstPhaseId).traceInvalid(initial)
+ } else try {
+ val owner = denot.owner.denot
+ if (!traceInvalid(owner)) explainSym("owner is invalid")
+ else if (!owner.isClass || owner.isRefinementClass || denot.isSelfSym) true
+ else if (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol) true
+ else explainSym(s"decls of ${show(owner)} are ${owner.unforcedDecls.lookupAll(denot.name).toList}, do not contain ${denot.symbol}")
+ } catch {
+ case ex: StaleSymbol => explainSym(s"$ex was thrown")
+ }
+ }
+ case _ =>
+ explain("denotation is not a SymDenotation")
+ }
+ }
+}
+
+object SymDenotations {
+
+ /** A sym-denotation represents the contents of a definition
+ * during a period.
+ */
+ class SymDenotation private[SymDenotations] (
+ symbol: Symbol,
+ ownerIfExists: Symbol,
+ final val name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol) {
+
+ //assert(symbol.id != 4940, name)
+
+ override def hasUniqueSym: Boolean = exists
+
+ /** Debug only
+ override def validFor_=(p: Period) = {
+ super.validFor_=(p)
+ }
+ */
+ if (Config.checkNoSkolemsInInfo) assertNoSkolems(initInfo)
+
+ // ------ Getting and setting fields -----------------------------
+
+ private[this] var myFlags: FlagSet = adaptFlags(initFlags)
+ private[this] var myInfo: Type = initInfo
+ private[this] var myPrivateWithin: Symbol = initPrivateWithin
+ private[this] var myAnnotations: List[Annotation] = Nil
+
+ /** The owner of the symbol; overridden in NoDenotation */
+ def owner: Symbol = ownerIfExists
+
+ /** Same as owner, except returns NoSymbol for NoSymbol */
+ def maybeOwner: Symbol = if (exists) owner else NoSymbol
+
+ /** The flag set */
+ final def flags(implicit ctx: Context): FlagSet = { ensureCompleted(); myFlags }
+
+ /** The flag set without forcing symbol completion.
+ * Should be used only for printing.
+ */
+ private[dotc] final def flagsUNSAFE = myFlags
+
+ /** Adapt flag set to this denotation's term or type nature */
+ private def adaptFlags(flags: FlagSet) = if (isType) flags.toTypeFlags else flags.toTermFlags
+
+ /** Update the flag set */
+ final def flags_=(flags: FlagSet): Unit =
+ myFlags = adaptFlags(flags)
+
+ /** Set given flag(s) of this denotation */
+ final def setFlag(flags: FlagSet): Unit = { myFlags |= flags }
+
+ /** Unset given flag(s) of this denotation */
+ final def resetFlag(flags: FlagSet): Unit = { myFlags &~= flags }
+
+ /** Set applicable flags from `flags` which is a subset of {NoInits, PureInterface} */
+ final def setApplicableFlags(flags: FlagSet): Unit = {
+ val mask = if (myFlags.is(Trait)) NoInitsInterface else NoInits
+ setFlag(flags & mask)
+ }
+
+ /** Has this denotation one of the flags in `fs` set? */
+ final def is(fs: FlagSet)(implicit ctx: Context) = {
+ (if (fs <= FromStartFlags) myFlags else flags) is fs
+ }
+
+ /** Has this denotation one of the flags in `fs` set, whereas none of the flags
+ * in `butNot` are set?
+ */
+ final def is(fs: FlagSet, butNot: FlagSet)(implicit ctx: Context) =
+ (if (fs <= FromStartFlags && butNot <= FromStartFlags) myFlags else flags) is (fs, butNot)
+
+ /** Has this denotation all of the flags in `fs` set? */
+ final def is(fs: FlagConjunction)(implicit ctx: Context) =
+ (if (fs <= FromStartFlags) myFlags else flags) is fs
+
+ /** Has this denotation all of the flags in `fs` set, whereas none of the flags
+ * in `butNot` are set?
+ */
+ final def is(fs: FlagConjunction, butNot: FlagSet)(implicit ctx: Context) =
+ (if (fs <= FromStartFlags && butNot <= FromStartFlags) myFlags else flags) is (fs, butNot)
+
+ /** The type info.
+ * The info is an instance of TypeType iff this is a type denotation
+ * Uncompleted denotations set myInfo to a LazyType.
+ */
+ final def info(implicit ctx: Context): Type = myInfo match {
+ case myInfo: LazyType => completeFrom(myInfo); info
+ case _ => myInfo
+ }
+
+ /** The type info, or, if symbol is not yet completed, the completer */
+ final def infoOrCompleter = myInfo
+
+ /** Optionally, the info if it is completed */
+ final def unforcedInfo: Option[Type] = myInfo match {
+ case myInfo: LazyType => None
+ case _ => Some(myInfo)
+ }
+
+ private def completeFrom(completer: LazyType)(implicit ctx: Context): Unit = {
+ if (completions ne noPrinter) {
+ completions.println(i"${" " * indent}completing ${if (isType) "type" else "val"} $name")
+ indent += 1
+ }
+ if (myFlags is Touched) throw CyclicReference(this)
+ myFlags |= Touched
+
+ // completions.println(s"completing ${this.debugString}")
+ try completer.complete(this)(ctx.withPhase(validFor.firstPhaseId))
+ catch {
+ case ex: CyclicReference =>
+ completions.println(s"error while completing ${this.debugString}")
+ throw ex
+ }
+ finally
+ if (completions ne noPrinter) {
+ indent -= 1
+ completions.println(i"${" " * indent}completed $name in $owner")
+ }
+ // completions.println(s"completed ${this.debugString}")
+ }
+
+ protected[dotc] def info_=(tp: Type) = {
+ /* // DEBUG
+ def illegal: String = s"illegal type for $this: $tp"
+ if (this is Module) // make sure module invariants that allow moduleClass and sourceModule to work are kept.
+ tp match {
+ case tp: ClassInfo => assert(tp.selfInfo.isInstanceOf[TermRefBySym], illegal)
+ case tp: NamedType => assert(tp.isInstanceOf[TypeRefBySym], illegal)
+ case tp: ExprType => assert(tp.resultType.isInstanceOf[TypeRefBySym], illegal)
+ case _ =>
+ }
+ */
+ if (Config.checkNoSkolemsInInfo) assertNoSkolems(tp)
+ myInfo = tp
+ }
+
+ /** The name, except
+ * - if this is a module class, strip the module class suffix
+ * - if this is a companion object with a clash-avoiding name, strip the
+ * "avoid clash" suffix
+ */
+ def effectiveName(implicit ctx: Context) =
+ if (this is ModuleClass) name.stripModuleClassSuffix
+ else name.stripAvoidClashSuffix
+
+ /** The privateWithin boundary, NoSymbol if no boundary is given.
+ */
+ final def privateWithin(implicit ctx: Context): Symbol = { ensureCompleted(); myPrivateWithin }
+
+ /** Set privateWithin. */
+ protected[core] final def privateWithin_=(sym: Symbol): Unit =
+ myPrivateWithin = sym
+
+ /** The annotations of this denotation */
+ final def annotations(implicit ctx: Context): List[Annotation] = {
+ ensureCompleted(); myAnnotations
+ }
+
+ /** Update the annotations of this denotation */
+ private[core] final def annotations_=(annots: List[Annotation]): Unit =
+ myAnnotations = annots
+
+ /** Does this denotation have an annotation matching the given class symbol? */
+ final def hasAnnotation(cls: Symbol)(implicit ctx: Context) =
+ dropOtherAnnotations(annotations, cls).nonEmpty
+
+ /** Apply transform `f` to all annotations of this denotation */
+ final def transformAnnotations(f: Annotation => Annotation)(implicit ctx: Context): Unit =
+ annotations = annotations.mapConserve(f)
+
+ /** Keep only those annotations that satisfy `p` */
+ final def filterAnnotations(p: Annotation => Boolean)(implicit ctx: Context): Unit =
+ annotations = annotations.filterConserve(p)
+
+ /** Optionally, the annotation matching the given class symbol */
+ final def getAnnotation(cls: Symbol)(implicit ctx: Context): Option[Annotation] =
+ dropOtherAnnotations(annotations, cls) match {
+ case annot :: _ => Some(annot)
+ case nil => None
+ }
+
+ /** The same as getAnnotation, but without ensuring
+ * that the symbol carrying the annotation is completed
+ */
+ final def unforcedAnnotation(cls: Symbol)(implicit ctx: Context): Option[Annotation] =
+ dropOtherAnnotations(myAnnotations, cls) match {
+ case annot :: _ => Some(annot)
+ case nil => None
+ }
+
+ /** Add given annotation to the annotations of this denotation */
+ final def addAnnotation(annot: Annotation): Unit =
+ annotations = annot :: myAnnotations
+
+ /** Remove annotation with given class from this denotation */
+ final def removeAnnotation(cls: Symbol)(implicit ctx: Context): Unit =
+ annotations = myAnnotations.filterNot(_ matches cls)
+
+ /** Remove any annotations with same class as `annot`, and add `annot` */
+ final def updateAnnotation(annot: Annotation)(implicit ctx: Context): Unit = {
+ removeAnnotation(annot.symbol)
+ addAnnotation(annot)
+ }
+
+ /** Add all given annotations to this symbol */
+ final def addAnnotations(annots: TraversableOnce[Annotation])(implicit ctx: Context): Unit =
+ annots.foreach(addAnnotation)
+
+ @tailrec
+ private def dropOtherAnnotations(anns: List[Annotation], cls: Symbol)(implicit ctx: Context): List[Annotation] = anns match {
+ case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
+ case Nil => Nil
+ }
+
+ /** The denotation is completed: info is not a lazy type and attributes have defined values */
+ final def isCompleted: Boolean = !myInfo.isInstanceOf[LazyType]
+
+ /** The denotation is in train of being completed */
+ final def isCompleting: Boolean = (myFlags is Touched) && !isCompleted
+
+ /** The completer of this denotation. @pre: Denotation is not yet completed */
+ final def completer: LazyType = myInfo.asInstanceOf[LazyType]
+
+ /** Make sure this denotation is completed */
+ final def ensureCompleted()(implicit ctx: Context): Unit = info
+
+ /** The symbols defined in this class or object.
+ * Careful! This does not force the type, so is compilation order dependent.
+ * This method should be used only in the following circumstances:
+ *
+ * 1. When accessing type parameters or type parameter accessors (both are entered before
+ * completion).
+ * 2. When obtaining the current scope in order to enter, rename or delete something there.
+ * 3. When playing it safe in order not to raise CyclicReferences, e.g. for printing things
+ * or taking more efficient shortcuts (e.g. the stillValid test).
+ */
+ final def unforcedDecls(implicit ctx: Context): Scope = myInfo match {
+ case cinfo: LazyType =>
+ val knownDecls = cinfo.decls
+ if (knownDecls ne EmptyScope) knownDecls
+ else { completeFrom(cinfo); unforcedDecls } // complete-once
+ case _ => info.decls
+ }
+
+ /** If this is a package class, the symbols entered in it
+ * before it is completed. (this is needed to eagerly enter synthetic
+ * aliases such as AnyRef into a package class without forcing it.)
+ * Right now, the only usage is for the AnyRef alias in Definitions.
+ */
+ final private[core] def currentPackageDecls(implicit ctx: Context): MutableScope = myInfo match {
+ case pinfo: SymbolLoaders # PackageLoader => pinfo.currentDecls
+ case _ => unforcedDecls.openForMutations
+ }
+
+ // ------ Names ----------------------------------------------
+
+ /** The expanded name of this denotation. */
+ final def expandedName(implicit ctx: Context) =
+ if (is(ExpandedName) || isConstructor) name
+ else {
+ def legalize(name: Name): Name = // JVM method names may not contain `<' or `>' characters
+ if (is(Method)) name.replace('<', '(').replace('>', ')') else name
+ legalize(name.expandedName(initial.owner))
+ }
+ // need to use initial owner to disambiguate, as multiple private symbols with the same name
+ // might have been moved from different origins into the same class
+
+ /** The name with which the denoting symbol was created */
+ final def originalName(implicit ctx: Context) = {
+ val d = initial
+ if (d is ExpandedName) d.name.unexpandedName else d.name // !!!DEBUG, was: effectiveName
+ }
+
+ /** The encoded full path name of this denotation, where outer names and inner names
+ * are separated by `separator` strings.
+ * Never translates expansions of operators back to operator symbol.
+ * Drops package objects. Represents terms in the owner chain by a simple `~`.
+ * (Note: scalac uses nothing to represent terms, which can cause name clashes
+ * between same-named definitions in different enclosing methods. Before this commit
+ * we used `$' but this can cause ambiguities with the class separator '$').
+ * A separator "" means "flat name"; the real separator in this case is "$" and
+ * enclosing packages do not form part of the name.
+ */
+ def fullNameSeparated(separator: String)(implicit ctx: Context): Name = {
+ var sep = separator
+ var stopAtPackage = false
+ if (sep.isEmpty) {
+ sep = "$"
+ stopAtPackage = true
+ }
+ if (symbol == NoSymbol ||
+ owner == NoSymbol ||
+ owner.isEffectiveRoot ||
+ stopAtPackage && owner.is(PackageClass)) name
+ else {
+ var encl = owner
+ while (!encl.isClass && !encl.isPackageObject) {
+ encl = encl.owner
+ sep += "~"
+ }
+ if (owner.is(ModuleClass, butNot = Package) && sep == "$") sep = "" // duplicate scalac's behavior: don't write a double '$$' for module class members.
+ val fn = encl.fullNameSeparated(separator) ++ sep ++ name
+ if (isType) fn.toTypeName else fn.toTermName
+ }
+ }
+
+ /** The encoded flat name of this denotation, where joined names are separated by `separator` characters. */
+ def flatName(implicit ctx: Context): Name = fullNameSeparated("")
+
+ /** `fullName` where `.' is the separator character */
+ def fullName(implicit ctx: Context): Name = fullNameSeparated(".")
+
+ // ----- Tests -------------------------------------------------
+
+ /** Is this denotation a type? */
+ override def isType: Boolean = name.isTypeName
+
+ /** Is this denotation a class? */
+ final def isClass: Boolean = isInstanceOf[ClassDenotation]
+
+ /** Is this denotation a non-trait class? */
+ final def isRealClass(implicit ctx: Context) = isClass && !is(Trait)
+
+ /** Cast to class denotation */
+ final def asClass: ClassDenotation = asInstanceOf[ClassDenotation]
+
+ /** is this symbol the result of an erroneous definition? */
+ def isError: Boolean = false
+
+ /** Make denotation not exist */
+ final def markAbsent(): Unit =
+ myInfo = NoType
+
+ /** Is symbol known to not exist? */
+ final def isAbsent(implicit ctx: Context): Boolean =
+ myInfo == NoType ||
+ (this is (ModuleVal, butNot = Package)) && moduleClass.isAbsent
+
+ /** Is this symbol the root class or its companion object? */
+ final def isRoot: Boolean =
+ (name.toTermName == nme.ROOT || name == nme.ROOTPKG) && (owner eq NoSymbol)
+
+ /** Is this symbol the empty package class or its companion object? */
+ final def isEmptyPackage(implicit ctx: Context): Boolean =
+ name.toTermName == nme.EMPTY_PACKAGE && owner.isRoot
+
+ /** Is this symbol the root class/package or the empty package class (or a companion object thereof)? */
+ final def isEffectiveRoot(implicit ctx: Context) = isRoot || isEmptyPackage
+
+ /** Is this symbol an anonymous class? */
+ final def isAnonymousClass(implicit ctx: Context): Boolean =
+ isClass && (initial.name startsWith tpnme.ANON_CLASS)
+
+ final def isAnonymousFunction(implicit ctx: Context) =
+ this.symbol.is(Method) && (initial.name startsWith nme.ANON_FUN)
+
+ final def isAnonymousModuleVal(implicit ctx: Context) =
+ this.symbol.is(ModuleVal) && (initial.name startsWith nme.ANON_CLASS)
+
+ /** Is this a companion class method or companion object method?
+ * These methods are generated by Symbols#synthesizeCompanionMethod
+ * and used in SymDenotations#companionClass and
+ * SymDenotations#companionModule .
+ */
+ final def isCompanionMethod(implicit ctx: Context) =
+ name.toTermName == nme.COMPANION_CLASS_METHOD ||
+ name.toTermName == nme.COMPANION_MODULE_METHOD
+
+ /** Is this a synthetic method that represents conversions between representations of a value class?
+ * These methods are generated in ExtensionMethods
+ * and used in ElimErasedValueType.
+ */
+ final def isValueClassConvertMethod(implicit ctx: Context) =
+ name.toTermName == nme.U2EVT ||
+ name.toTermName == nme.EVT2U
+
+ /** Is symbol a primitive value class? */
+ def isPrimitiveValueClass(implicit ctx: Context) =
+ maybeOwner == defn.ScalaPackageClass && defn.ScalaValueClasses().contains(symbol)
+
+ /** Is symbol a primitive numeric value class? */
+ def isNumericValueClass(implicit ctx: Context) =
+ maybeOwner == defn.ScalaPackageClass && defn.ScalaNumericValueClasses().contains(symbol)
+
+ /** Is symbol a phantom class for which no runtime representation exists? */
+ def isPhantomClass(implicit ctx: Context) = defn.PhantomClasses contains symbol
+
+ /** Is this symbol a class representing a refinement? These classes
+ * are used only temporarily in Typer and Unpickler as an intermediate
+ * step for creating Refinement types.
+ */
+ final def isRefinementClass(implicit ctx: Context): Boolean =
+ name.decode == tpnme.REFINE_CLASS
+
+ /** Is this symbol a package object or its module class? */
+ def isPackageObject(implicit ctx: Context): Boolean = {
+ val poName = if (isType) nme.PACKAGE_CLS else nme.PACKAGE
+ (name.toTermName == poName) && (owner is Package) && (this is Module)
+ }
+
+ /** Is this symbol an abstract type? */
+ final def isAbstractType(implicit ctx: Context) = isType && (this is Deferred)
+
+ /** Is this symbol an alias type? */
+ final def isAliasType(implicit ctx: Context) = isAbstractOrAliasType && !(this is Deferred)
+
+ /** Is this symbol an abstract or alias type? */
+ final def isAbstractOrAliasType = isType & !isClass
+
+ /** Is this the denotation of a self symbol of some class?
+ * This is the case if one of two conditions holds:
+ * 1. It is the symbol referred to in the selfInfo part of the ClassInfo
+ * which is the type of this symbol's owner.
+ * 2. This symbol is owned by a class, its selfInfo field refers to a type
+ * (indicating the self definition does not introduce a name), and the
+ * symbol's name is "_".
+ * TODO: Find a more robust way to characterize self symbols, maybe by
+ * spending a Flag on them?
+ */
+ final def isSelfSym(implicit ctx: Context) = owner.infoOrCompleter match {
+ case ClassInfo(_, _, _, _, selfInfo) =>
+ selfInfo == symbol ||
+ selfInfo.isInstanceOf[Type] && name == nme.WILDCARD
+ case _ => false
+ }
+
+ /** Is this definition contained in `boundary`?
+ * Same as `ownersIterator contains boundary` but more efficient.
+ */
+ final def isContainedIn(boundary: Symbol)(implicit ctx: Context): Boolean = {
+ def recur(sym: Symbol): Boolean =
+ if (sym eq boundary) true
+ else if (sym eq NoSymbol) false
+ else if ((sym is PackageClass) && !(boundary is PackageClass)) false
+ else recur(sym.owner)
+ recur(symbol)
+ }
+
+ final def isProperlyContainedIn(boundary: Symbol)(implicit ctx: Context): Boolean =
+ symbol != boundary && isContainedIn(boundary)
+
+ /** Is this denotation static (i.e. with no outer instance)? */
+ final def isStatic(implicit ctx: Context) =
+ (this is JavaStatic) || this.exists && owner.isStaticOwner || this.isRoot
+
+ /** Is this a package class or module class that defines static symbols? */
+ final def isStaticOwner(implicit ctx: Context): Boolean =
+ (this is PackageClass) || (this is ModuleClass) && isStatic
+
+ /** Is this denotation defined in the same scope and compilation unit as that symbol? */
+ final def isCoDefinedWith(that: Symbol)(implicit ctx: Context) =
+ (this.effectiveOwner == that.effectiveOwner) &&
+ ( !(this.effectiveOwner is PackageClass)
+ || this.isAbsent || that.isAbsent
+ || { // check if they are defined in the same file(or a jar)
+ val thisFile = this.symbol.associatedFile
+ val thatFile = that.symbol.associatedFile
+ ( thisFile == null
+ || thatFile == null
+ || thisFile.path == thatFile.path // Cheap possibly wrong check, then expensive normalization
+ || thisFile.canonicalPath == thatFile.canonicalPath
+ )
+ }
+ )
+
+ /** Is this a denotation of a stable term (or an arbitrary type)? */
+ final def isStable(implicit ctx: Context) =
+ isType || is(Stable) || !(is(UnstableValue) || info.isInstanceOf[ExprType])
+
+ /** Is this a "real" method? A real method is a method which is:
+ * - not an accessor
+ * - not a label
+ * - not an anonymous function
+ * - not a companion method
+ */
+ final def isRealMethod(implicit ctx: Context) =
+ this.is(Method, butNot = AccessorOrLabel) &&
+ !isAnonymousFunction &&
+ !isCompanionMethod
+
+ /** Is this a getter? */
+ final def isGetter(implicit ctx: Context) =
+ (this is Accessor) && !originalName.isSetterName && !originalName.isScala2LocalSuffix
+
+ /** Is this a setter? */
+ final def isSetter(implicit ctx: Context) =
+ (this is Accessor) &&
+ originalName.isSetterName &&
+ (!isCompleted || info.firstParamTypes.nonEmpty) // to avoid being fooled by var x_= : Unit = ...
+
+ /** is this a symbol representing an import? */
+ final def isImport = name == nme.IMPORT
+
+ /** is this the constructor of a class? */
+ final def isClassConstructor = name == nme.CONSTRUCTOR
+
+ /** Is this the constructor of a trait? */
+ final def isImplClassConstructor = name == nme.TRAIT_CONSTRUCTOR
+
+ /** Is this the constructor of a trait or a class */
+ final def isConstructor = name.isConstructorName
+
+ /** Is this a local template dummy? */
+ final def isLocalDummy: Boolean = name.isLocalDummyName
+
+ /** Does this symbol denote the primary constructor of its enclosing class? */
+ final def isPrimaryConstructor(implicit ctx: Context) =
+ isConstructor && owner.primaryConstructor == symbol
+
+ /** Does this symbol denote the static constructor of its enclosing class? */
+ // Purely a name test; no flags or info are consulted.
+ final def isStaticConstructor(implicit ctx: Context) =
+ name.isStaticConstructorName
+
+ /** Is this a subclass of the given class `base`?
+ * Base implementation returns false; overridden in ClassDenotation.
+ */
+ def isSubClass(base: Symbol)(implicit ctx: Context) = false
+
+ /** Is this a subclass of `base`,
+ * and is the denoting symbol also different from `Null` or `Nothing`?
+ * @note erroneous classes are assumed to derive from all other classes
+ * and all classes derive from them.
+ * Base implementation returns false; overridden in ClassDenotation.
+ */
+ def derivesFrom(base: Symbol)(implicit ctx: Context) = false
+
+ /** Is this symbol a class that extends `AnyVal`? */
+ final def isValueClass(implicit ctx: Context): Boolean = {
+ val di = initial
+ di.isClass &&
+ di.derivesFrom(defn.AnyValClass)(ctx.withPhase(di.validFor.firstPhaseId))
+ // We call derivesFrom at the initial phase both because AnyVal does not exist
+ // after Erasure and to avoid cyclic references caused by forcing denotations
+ }
+
+ /** Is this symbol a class references to which that are supertypes of null? */
+ final def isNullableClass(implicit ctx: Context): Boolean =
+ isClass && !isValueClass && !(this is ModuleClass) && symbol != defn.NothingClass
+
+ /** Is this definition accessible as a member of tree with type `pre`?
+ * @param pre The type of the tree from which the selection is made
+ * @param superAccess Access is via super
+ * @param whyNot If non-null, failure reasons are appended here for diagnostics.
+ * Everything is accessible if `pre` is `NoPrefix`.
+ * A symbol with type `NoType` is not accessible for any other prefix.
+ */
+ final def isAccessibleFrom(pre: Type, superAccess: Boolean = false, whyNot: StringBuffer = null)(implicit ctx: Context): Boolean = {
+
+ /** Are we inside definition of `boundary`? */
+ def accessWithin(boundary: Symbol) =
+ ctx.owner.isContainedIn(boundary) &&
+ (!(this is JavaDefined) || // disregard package nesting for Java
+ ctx.owner.enclosingPackageClass == boundary.enclosingPackageClass)
+
+ /** Are we within definition of linked class of `boundary`? */
+ def accessWithinLinked(boundary: Symbol) = {
+ val linked = boundary.linkedClass
+ (linked ne NoSymbol) && accessWithin(linked)
+ }
+
+ /** Is `pre` the same as C.this, where C is exactly the owner of this symbol,
+ * or, if this symbol is protected, a subclass of the owner?
+ */
+ def isCorrectThisType(pre: Type): Boolean = pre match {
+ case pre: ThisType =>
+ (pre.cls eq owner) || (this is Protected) && pre.cls.derivesFrom(owner)
+ case pre: TermRef =>
+ pre.symbol.moduleClass == owner
+ case _ =>
+ false
+ }
+
+ /** Is protected access to target symbol permitted? */
+ def isProtectedAccessOK = {
+ def fail(str: => String): Boolean = {
+ if (whyNot != null) whyNot append str
+ false
+ }
+ val cls = owner.enclosingSubClass
+ if (!cls.exists)
+ fail(
+ i"""
+ | Access to protected $this not permitted because enclosing ${ctx.owner.enclosingClass.showLocated}
+ | is not a subclass of ${owner.showLocated} where target is defined""")
+ else if (
+ !( isType // allow accesses to types from arbitrary subclasses fixes #4737
+ || pre.baseTypeRef(cls).exists // ??? why not use derivesFrom ???
+ || isConstructor
+ || (owner is ModuleClass) // don't perform this check for static members
+ ))
+ fail(
+ i"""
+ | Access to protected ${symbol.show} not permitted because prefix type ${pre.widen.show}
+ | does not conform to ${cls.showLocated} where the access takes place""")
+ else true
+ }
+
+ if (pre eq NoPrefix) true
+ else if (info eq NoType) false
+ else {
+ // The most restrictive owner up to which this symbol is visible.
+ val boundary = accessBoundary(owner)
+
+ ( boundary.isTerm
+ || boundary.isRoot
+ || (accessWithin(boundary) || accessWithinLinked(boundary)) &&
+ ( !(this is Local)
+ || (owner is ImplClass) // allow private local accesses to impl class members
+ || isCorrectThisType(pre)
+ )
+ || (this is Protected) &&
+ ( superAccess
+ || pre.isInstanceOf[ThisType]
+ || ctx.phase.erasedTypes
+ || isProtectedAccessOK
+ )
+ )
+ }
+ }
+
+ /** Do members of this symbol need translation via asSeenFrom when
+ * accessed via prefix `pre`?
+ */
+ def membersNeedAsSeenFrom(pre: Type)(implicit ctx: Context) =
+ !( this.isTerm
+ || this.isStaticOwner
+ || ctx.erasedTypes
+ || (pre eq NoPrefix) || (pre eq thisType)
+ )
+
+ /** Is this symbol concrete, or that symbol deferred? */
+ def isAsConcrete(that: Symbol)(implicit ctx: Context): Boolean =
+ !(this is Deferred) || (that is Deferred)
+
+ /** Does this symbol have defined or inherited default parameters?
+ * The result is cached on the flags: InheritedDefaultParams or NoDefaultParams
+ * is set as a side effect of the first computation.
+ */
+ def hasDefaultParams(implicit ctx: Context): Boolean =
+ if (this is HasDefaultParams) true
+ else if (this is NoDefaultParams) false
+ else {
+ val result = allOverriddenSymbols exists (_.hasDefaultParams)
+ setFlag(if (result) InheritedDefaultParams else NoDefaultParams)
+ result
+ }
+
+ /** Symbol is an owner that would be skipped by effectiveOwner. Skipped are
+ * - package objects
+ * - labels
+ * - non-lazy valdefs
+ */
+ def isWeakOwner(implicit ctx: Context): Boolean =
+ isPackageObject ||
+ isTerm && !is(MethodOrLazy, butNot = Label) && !isLocalDummy
+
+ // def isOverridable: Boolean = !!! need to enforce that classes cannot be redefined
+ /** Is this a skolem symbol? Identified purely by its name. */
+ def isSkolem: Boolean = name == nme.SKOLEM
+
+ /** Is this an inline method (and not merely an accessor carrying the flag)? */
+ def isInlineMethod(implicit ctx: Context): Boolean = is(InlineMethod, butNot = Accessor)
+
+ // ------ access to related symbols ---------------------------------
+
+ /* Modules and module classes are represented as follows:
+ *
+ * object X extends Y { def f() }
+ *
+ * <module> lazy val X: X$ = new X$
+ * <module> class X$ extends Y { this: X.type => def f() }
+ *
+ * During completion, references to moduleClass and sourceModules are stored in
+ * the completers.
+ */
+ /** The class implementing this module, NoSymbol if not applicable. */
+ final def moduleClass(implicit ctx: Context): Symbol = {
+ // NOTE(review): diagnostic goes to stdout via println — presumably a debugging
+ // leftover; consider routing through the reporter.
+ def notFound = { println(s"missing module class for $name: $myInfo"); NoSymbol }
+ if (this is ModuleVal)
+ myInfo match {
+ case info: TypeRef => info.symbol
+ case ExprType(info: TypeRef) => info.symbol // needed after uncurry, when module terms might be accessor defs
+ case info: LazyType => info.moduleClass
+ case t: MethodType =>
+ t.resultType match {
+ case info: TypeRef => info.symbol
+ case _ => notFound
+ }
+ case _ => notFound
+ }
+ else NoSymbol
+ }
+
+ /** The module implemented by this module class, NoSymbol if not applicable. */
+ final def sourceModule(implicit ctx: Context): Symbol = myInfo match {
+ case ClassInfo(_, _, _, _, selfType) if this is ModuleClass =>
+ // NOTE(review): this inner match is not exhaustive; it relies on the invariant
+ // that a module class's selfInfo is either a TermRef or a Symbol.
+ selfType match {
+ case selfType: TermRef => selfType.symbol
+ case selfType: Symbol => selfType.info.asInstanceOf[TermRef].symbol
+ }
+ case info: LazyType =>
+ info.sourceModule
+ case _ =>
+ NoSymbol
+ }
+
+ /** The field accessed by this getter or setter, or if it does not exist, the getter */
+ def accessedFieldOrGetter(implicit ctx: Context): Symbol = {
+ // For a setter, look up the getter-named declaration; otherwise use our own name.
+ val targetName = if (isSetter) name.asTermName.getterName else name
+ val candidates = owner.info.decl(targetName)
+ // The field is any matching declaration that is not a method (computed eagerly).
+ val fieldSym = candidates.suchThat(!_.is(Method)).symbol
+ // The getter alternative is only evaluated if no field exists.
+ def getterSym = candidates.suchThat(_.info.isParameterless).symbol
+ fieldSym.orElse(getterSym)
+ }
+
+ /** The field accessed by a getter or setter, or
+ * if it does not exist, the getter of a setter, or
+ * if that does not exist the symbol itself.
+ */
+ def underlyingSymbol(implicit ctx: Context): Symbol =
+ if (is(Accessor)) accessedFieldOrGetter orElse symbol else symbol
+
+ /** The chain of owners of this denotation, starting with the denoting symbol itself.
+ * The iterator is stateful; each call to `ownersIterator` returns a fresh one.
+ */
+ final def ownersIterator(implicit ctx: Context) = new Iterator[Symbol] {
+ private[this] var current = symbol
+ def hasNext = current.exists
+ def next: Symbol = {
+ val result = current
+ current = current.owner
+ result
+ }
+ }
+
+ /** If this is a weak owner, its owner, otherwise the denoting symbol. */
+ final def skipWeakOwner(implicit ctx: Context): Symbol =
+ if (isWeakOwner) owner.skipWeakOwner else symbol
+
+ /** The owner, skipping package objects, labels and non-lazy valdefs. */
+ final def effectiveOwner(implicit ctx: Context) = owner.skipWeakOwner
+
+ /** The class containing this denotation.
+ * If this denotation is already a class, return itself
+ * Definitions flagged with InSuperCall are treated specially.
+ * Their enclosing class is not the lexically enclosing class,
+ * but in turn the enclosing class of the latter. This reflects
+ * the context created by `Context#superCallContext`, `Context#thisCallArgContext`
+ * for these definitions.
+ *
+ * Note, that as packages have ClassSymbols, top level classes will have an `enclosingClass`
+ * with Package flag set.
+ */
+ final def enclosingClass(implicit ctx: Context): Symbol = {
+ // `skip` records whether an InSuperCall/JavaStaticTerm owner was crossed,
+ // in which case the first enclosing class found is skipped too.
+ def enclClass(sym: Symbol, skip: Boolean): Symbol = {
+ def newSkip = sym.is(InSuperCall) || sym.is(JavaStaticTerm)
+ if (!sym.exists)
+ NoSymbol
+ else if (sym.isClass)
+ if (skip) enclClass(sym.owner, newSkip) else sym
+ else
+ enclClass(sym.owner, skip || newSkip)
+ }
+ enclClass(symbol, false)
+ }
+
+ /** A class that in source code would be lexically enclosing */
+ final def lexicallyEnclosingClass(implicit ctx: Context): Symbol =
+ if (!exists || isClass) symbol else owner.lexicallyEnclosingClass
+
+ /** A symbol is effectively final if it cannot be overridden in a subclass */
+ final def isEffectivelyFinal(implicit ctx: Context): Boolean =
+ is(PrivateOrFinalOrInline) || !owner.isClass || owner.is(ModuleOrFinal) || owner.isAnonymousClass
+
+ /** The class containing this denotation which has the given effective name. */
+ final def enclosingClassNamed(name: Name)(implicit ctx: Context): Symbol = {
+ val cls = enclosingClass
+ if (cls.effectiveName == name || !cls.exists) cls else cls.owner.enclosingClassNamed(name)
+ }
+
+ /** The closest enclosing method containing this definition.
+ * A local dummy owner is mapped to the primary constructor of the class.
+ */
+ final def enclosingMethod(implicit ctx: Context): Symbol =
+ if (this is (Method, butNot = Label)) symbol
+ else if (this.isClass) primaryConstructor
+ else if (this.exists) owner.enclosingMethod
+ else NoSymbol
+
+ /** The top-level class containing this denotation,
+ * except for a toplevel module, where its module class is returned.
+ */
+ final def topLevelClass(implicit ctx: Context): Symbol = {
+ def topLevel(d: SymDenotation): Symbol = {
+ if (d.isEffectiveRoot || (d is PackageClass) || (d.owner is PackageClass)) d.symbol
+ else topLevel(d.owner)
+ }
+ val sym = topLevel(this)
+ if (sym.isClass) sym else sym.moduleClass
+ }
+
+ /** The package class containing this denotation */
+ final def enclosingPackageClass(implicit ctx: Context): Symbol =
+ if (this is PackageClass) symbol else owner.enclosingPackageClass
+
+ /** The module object with the same (term-) name as this class or module class,
+ * and which is also defined in the same scope and compilation unit.
+ * NoSymbol if this module does not exist.
+ */
+ final def companionModule(implicit ctx: Context): Symbol = {
+ if (this.flagsUNSAFE is Flags.Module) this.sourceModule
+ else {
+ // Companions are linked via a synthetic companion method in the decls.
+ val companionMethod = info.decls.denotsNamed(nme.COMPANION_MODULE_METHOD, selectPrivate).first
+ if (companionMethod.exists)
+ companionMethod.info.resultType.classSymbol.sourceModule
+ else
+ NoSymbol
+ }
+ }
+
+
+ /** The class with the same (type-) name as this module or module class,
+ * and which is also defined in the same scope and compilation unit.
+ * NoSymbol if this class does not exist.
+ */
+ final def companionClass(implicit ctx: Context): Symbol = {
+ val companionMethod = info.decls.denotsNamed(nme.COMPANION_CLASS_METHOD, selectPrivate).first
+
+ if (companionMethod.exists)
+ companionMethod.info.resultType.classSymbol
+ else
+ NoSymbol
+ }
+
+ /** For a module class, its companion class; for a class, the module class of
+ * its companion module; NoSymbol otherwise. Resolved by name via `companionNamed`.
+ * NOTE(review): the `scalac` prefix suggests this mirrors scalac's linkedClassOfClass — confirm.
+ */
+ final def scalacLinkedClass(implicit ctx: Context): Symbol =
+ if (this is ModuleClass) companionNamed(effectiveName.toTypeName)
+ else if (this.isClass) companionNamed(effectiveName.moduleClassName).sourceModule.moduleClass
+ else NoSymbol
+
+
+ /** Find companion class symbol with given name, or NoSymbol if none exists.
+ * Three alternative strategies:
+ * 1. If owner is a class, look in its members, otherwise
+ * 2. If current compilation unit has a typed tree,
+ * determine the defining statement sequence and search its trees, otherwise
+ * 3. If context has an enclosing scope which defines this symbol,
+ * lookup its companion in the same scope.
+ */
+ private def companionNamed(name: TypeName)(implicit ctx: Context): Symbol =
+ if (owner.isClass)
+ owner.info.decl(name).suchThat(_.isCoDefinedWith(symbol)).symbol
+ else if (!owner.exists || ctx.compilationUnit == null)
+ NoSymbol
+ else if (!ctx.compilationUnit.tpdTree.isEmpty)
+ tpd.definingStats(symbol).iterator
+ .map(tpd.definedSym)
+ .find(_.name == name)
+ .getOrElse(NoSymbol)
+ else if (ctx.scope == null)
+ NoSymbol
+ else if (ctx.scope.lookup(this.name) == symbol)
+ ctx.scope.lookup(name)
+ else
+ // Retry in the next outer context whose scope differs from the current one.
+ companionNamed(name)(ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next)
+
+ /** If this is a class, the module class of its companion object.
+ * If this is a module class, its companion class.
+ * NoSymbol otherwise.
+ */
+ final def linkedClass(implicit ctx: Context): Symbol =
+ if (this is ModuleClass) companionClass
+ else if (this.isClass) companionModule.moduleClass
+ else NoSymbol
+
+ /** The class that encloses the owner of the current context
+ * and that is a subclass of this class. NoSymbol if no such class exists.
+ */
+ final def enclosingSubClass(implicit ctx: Context) =
+ ctx.owner.ownersIterator.findSymbol(_.isSubClass(symbol))
+
+ /** The non-private symbol whose name and type matches the type of this symbol
+ * in the given class.
+ * @param inClass The class containing the result symbol's definition
+ * @param site The base type from which member types are computed
+ *
+ * inClass <-- find denot.symbol class C { <-- symbol is here
+ *
+ * site: Subtype of both inClass and C
+ */
+ final def matchingDecl(inClass: Symbol, site: Type)(implicit ctx: Context): Symbol = {
+ var denot = inClass.info.nonPrivateDecl(name)
+ if (denot.isTerm) // types of the same name always match
+ denot = denot.matchingDenotation(site, site.memberInfo(symbol))
+ denot.symbol
+ }
+
+ /** The non-private member of `site` whose name and type matches the type of this symbol
+ */
+ final def matchingMember(site: Type)(implicit ctx: Context): Symbol = {
+ var denot = site.nonPrivateMember(name)
+ if (denot.isTerm) // types of the same name always match
+ denot = denot.matchingDenotation(site, site.memberInfo(symbol))
+ denot.symbol
+ }
+
+ /** If false, this symbol cannot possibly participate in an override,
+ * either as overrider or overridee.
+ */
+ final def canMatchInheritedSymbols(implicit ctx: Context): Boolean =
+ maybeOwner.isClass && memberCanMatchInheritedSymbols
+
+ /** If false, this class member cannot possibly participate in an override,
+ * either as overrider or overridee.
+ */
+ final def memberCanMatchInheritedSymbols(implicit ctx: Context): Boolean =
+ !isConstructor && !is(Private)
+
+ /** The symbol, in class `inClass`, that is overridden by this denotation. */
+ final def overriddenSymbol(inClass: ClassSymbol)(implicit ctx: Context): Symbol =
+ if (!canMatchInheritedSymbols && (owner ne inClass)) NoSymbol
+ else matchingDecl(inClass, owner.thisType)
+
+ /** All symbols overridden by this denotation. */
+ final def allOverriddenSymbols(implicit ctx: Context): Iterator[Symbol] =
+ if (!canMatchInheritedSymbols) Iterator.empty
+ else overriddenFromType(owner.info)
+
+ /** Returns all matching symbols defined in parents of the selftype. */
+ final def extendedOverriddenSymbols(implicit ctx: Context): Iterator[Symbol] =
+ if (!canMatchInheritedSymbols) Iterator.empty
+ else overriddenFromType(owner.asClass.classInfo.selfType)
+
+ /** The overridden symbols in each base class of `tp`, excluding `tp`'s own class. */
+ private def overriddenFromType(tp: Type)(implicit ctx: Context): Iterator[Symbol] =
+ tp.baseClasses.tail.iterator map overriddenSymbol filter (_.exists)
+
+ /** The symbol overriding this symbol in given subclass `ofclazz`.
+ *
+ * @param ofclazz is a subclass of this symbol's owner
+ */
+ final def overridingSymbol(inClass: ClassSymbol)(implicit ctx: Context): Symbol =
+ if (canMatchInheritedSymbols) matchingDecl(inClass, inClass.thisType)
+ else NoSymbol
+
+ /** The symbol accessed by a super in the definition of this symbol when
+ * seen from class `base`. This symbol is always concrete.
+ * pre: `this.owner` is in the base class sequence of `base`.
+ */
+ final def superSymbolIn(base: Symbol)(implicit ctx: Context): Symbol = {
+ // Walk the part of base's linearization that follows this symbol's owner,
+ // returning the first concrete (non-Deferred) matching declaration.
+ def loop(bcs: List[ClassSymbol]): Symbol = bcs match {
+ case bc :: rest =>
+ val found = matchingDecl(bc, base.thisType).suchThat(alt => !(alt is Deferred)).symbol
+ if (found.exists) found else loop(rest)
+ case _ =>
+ NoSymbol
+ }
+ loop(base.info.baseClasses.dropWhile(owner != _).tail)
+ }
+
+ /** A member of class `base` is incomplete if
+ * (1) it is declared deferred or
+ * (2) it is abstract override and its super symbol in `base` is
+ * nonexistent or incomplete.
+ */
+ final def isIncompleteIn(base: Symbol)(implicit ctx: Context): Boolean =
+ (this is Deferred) ||
+ (this is AbsOverride) && {
+ val supersym = superSymbolIn(base)
+ supersym == NoSymbol || supersym.isIncompleteIn(base)
+ }
+
+ /** The class or term symbol up to which this symbol is accessible,
+ * or RootClass if it is public. As java protected statics are
+ * otherwise completely inaccessible in scala, they are treated
+ * as public.
+ * @param base The access boundary to assume if this symbol is protected
+ */
+ final def accessBoundary(base: Symbol)(implicit ctx: Context): Symbol = {
+ val fs = flags
+ if (fs is Private) owner
+ else if (fs is StaticProtected) defn.RootClass
+ else if (privateWithin.exists && !ctx.phase.erasedTypes) privateWithin
+ else if (fs is Protected) base
+ else defn.RootClass
+ }
+
+ /** The primary constructor of a class or trait, NoSymbol if not applicable.
+ * Overridden in ClassDenotation.
+ */
+ def primaryConstructor(implicit ctx: Context): Symbol = NoSymbol
+
+ // ----- type-related ------------------------------------------------
+
+ /** The type parameters of a class symbol, Nil for all other symbols */
+ def typeParams(implicit ctx: Context): List[TypeSymbol] = Nil
+
+ /** The named type parameters declared or inherited by this symbol */
+ def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = Set()
+
+ /** The type This(cls), where cls is this class, NoPrefix for all other symbols */
+ def thisType(implicit ctx: Context): Type = NoPrefix
+
+ /** A TypeRef addressing this type symbol via its owner's this-type. */
+ override def typeRef(implicit ctx: Context): TypeRef =
+ TypeRef(owner.thisType, name.asTypeName, this)
+
+ /** A TermRef addressing this term symbol via its owner's this-type. */
+ override def termRef(implicit ctx: Context): TermRef =
+ TermRef(owner.thisType, name.asTermName, this)
+
+ /** A TermRef with the NotAMethod signature, for non-method values. */
+ override def valRef(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(owner.thisType, name.asTermName, Signature.NotAMethod, this)
+
+ /** A TermRef carrying this denotation's own signature. */
+ override def termRefWithSig(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(owner.thisType, name.asTermName, signature, this)
+
+ /** A TermRef with a fixed symbol, bypassing member lookup. */
+ def nonMemberTermRef(implicit ctx: Context): TermRef =
+ TermRef.withFixedSym(owner.thisType, name.asTermName, symbol.asTerm)
+
+ /** The variance of this type parameter or type member as an Int, with
+ * +1 = Covariant, -1 = Contravariant, 0 = Nonvariant, or not a type parameter
+ */
+ final def variance(implicit ctx: Context): Int =
+ if (this is Covariant) 1
+ else if (this is Contravariant) -1
+ else 0
+
+ /** The flags to be used for a type parameter owned by this symbol.
+ * Overridden by ClassDenotation.
+ */
+ def typeParamCreationFlags: FlagSet = TypeParam
+
+ /** A kind-prefixed rendering of this denotation, for debugging output. */
+ override def toString = {
+ val kindString =
+ if (myFlags is ModuleClass) "module class"
+ else if (isClass) "class"
+ else if (isType) "type"
+ else if (myFlags is Module) "module"
+ else if (myFlags is Method) "method"
+ else "val"
+ s"$kindString $name"
+ }
+
+ // ----- Sanity checks and debugging */
+
+ def debugString = toString + "#" + symbol.id // !!! DEBUG
+
+ /** Does `tp` contain a skolem type anywhere in its structure? */
+ def hasSkolems(tp: Type): Boolean = tp match {
+ case tp: SkolemType => true
+ case tp: NamedType => hasSkolems(tp.prefix)
+ case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo)
+ case tp: RecType => hasSkolems(tp.parent)
+ case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType)
+ case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType)
+ case tp: ExprType => hasSkolems(tp.resType)
+ case tp: HKApply => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems)
+ case tp: AndOrType => hasSkolems(tp.tp1) || hasSkolems(tp.tp2)
+ case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi)
+ case tp: AnnotatedType => hasSkolems(tp.tpe)
+ case tp: TypeVar => hasSkolems(tp.inst)
+ case _ => false
+ }
+
+ /** Assert that `tp` contains no skolems, unless this denotation itself is a skolem. */
+ def assertNoSkolems(tp: Type) =
+ if (!this.isSkolem)
+ assert(!hasSkolems(tp), s"assigning type $tp containing skolems to $this")
+
+ // ----- copies and transforms ----------------------------------------
+
+ /** A new denotation for symbol `s` with info `i`, sharing this denotation's validity period. */
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new UniqueRefDenotation(s, i, validFor)
+
+ /** Copy this denotation, overriding selective fields */
+ final def copySymDenotation(
+ symbol: Symbol = this.symbol,
+ owner: Symbol = this.owner,
+ name: Name = this.name,
+ initFlags: FlagSet = UndefinedFlags,
+ info: Type = null,
+ privateWithin: Symbol = null,
+ annotations: List[Annotation] = null)(implicit ctx: Context) =
+ { // simulate default parameters, while also passing implicit context ctx to the default values
+ val initFlags1 = (if (initFlags != UndefinedFlags) initFlags else this.flags) &~ Frozen
+ val info1 = if (info != null) info else this.info
+ val privateWithin1 = if (privateWithin != null) privateWithin else this.privateWithin
+ val annotations1 = if (annotations != null) annotations else this.annotations
+ val d = ctx.SymDenotation(symbol, owner, name, initFlags1, info1, privateWithin1)
+ d.annotations = annotations1
+ d
+ }
+
+ override def initial: SymDenotation = super.initial.asSymDenotation
+
+ /** Install this denotation as the result of the given denotation transformer. */
+ override def installAfter(phase: DenotTransformer)(implicit ctx: Context): Unit =
+ super.installAfter(phase)
+
+ /** Apply a transformation `f` to all denotations in this group that start at or after
+ * given phase. Denotations are replaced while keeping the same validity periods.
+ */
+ override def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(implicit ctx: Context): Unit =
+ super.transformAfter(phase, f)
+
+ /** If denotation is private, remove the Private flag and expand the name if necessary */
+ def ensureNotPrivate(implicit ctx: Context) =
+ if (is(Private))
+ copySymDenotation(
+ name = expandedName,
+ initFlags = this.flags &~ Private | ExpandedName)
+ else this
+ }
+
+ /** The contents of a class definition during a period
+ */
+ class ClassDenotation private[SymDenotations] (
+ symbol: Symbol,
+ ownerIfExists: Symbol,
+ name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol,
+ initRunId: RunId)
+ extends SymDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) {
+
+ import util.LRUCache
+
+ // ----- denotation fields and accessors ------------------------------
+
+ if (initFlags is (Module, butNot = Package)) assert(name.isModuleClassName, s"module naming inconsistency: $name")
+
+ /** The symbol asserted to have type ClassSymbol */
+ def classSymbol: ClassSymbol = symbol.asInstanceOf[ClassSymbol]
+
+ /** The info asserted to have type ClassInfo */
+ def classInfo(implicit ctx: Context): ClassInfo = info.asInstanceOf[ClassInfo]
+
+ /** TODO: Document why caches are supposedly safe to use */
+ // Invalidated by `info_=` below, since a new info may carry new decls.
+ private[this] var myTypeParams: List[TypeSymbol] = _
+
+ private[this] var myNamedTypeParams: Set[TypeSymbol] = _
+
+ /** The type parameters in this class, in the order they appear in the current
+ * scope `decls`. This might be temporarily the incorrect order when
+ * reading Scala2 pickled info. The problem is fixed by `updateTypeParams`
+ * which is called once an unpickled symbol has been completed.
+ */
+ private def typeParamsFromDecls(implicit ctx: Context) =
+ unforcedDecls.filter(sym =>
+ (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
+
+ /** The type parameters of this class */
+ override final def typeParams(implicit ctx: Context): List[TypeSymbol] = {
+ if (myTypeParams == null)
+ myTypeParams =
+ if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls
+ else {
+ // Delegate to the initial denotation so all phases share one answer.
+ val di = initial
+ if (this ne di) di.typeParams
+ else infoOrCompleter match {
+ case info: TypeParamsCompleter => info.completerTypeParams(symbol)
+ case _ => typeParamsFromDecls
+ }
+ }
+ myTypeParams
+ }
+
+ /** The named type parameters declared or inherited by this class */
+ override final def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = {
+ def computeNamedTypeParams: Set[TypeSymbol] =
+ if (ctx.erasedTypes || is(Module)) Set() // fast return for modules to avoid scanning package decls
+ else memberNames(abstractTypeNameFilter).map(name =>
+ info.member(name).symbol.asType).filter(_.is(TypeParam, butNot = ExpandedName)).toSet
+ if (myNamedTypeParams == null) myNamedTypeParams = computeNamedTypeParams
+ myNamedTypeParams
+ }
+
+ /** Setting a new info also drops the cached type parameters. */
+ override protected[dotc] final def info_=(tp: Type) = {
+ super.info_=(tp)
+ myTypeParams = null // changing the info might change decls, and with it typeParams
+ }
+
+ /** The denotations of all parents in this class. */
+ def classParents(implicit ctx: Context): List[TypeRef] = info match {
+ case classInfo: ClassInfo => classInfo.classParents
+ case _ => Nil
+ }
+
+ /** The symbol of the superclass, NoSymbol if no superclass exists */
+ def superClass(implicit ctx: Context): Symbol = classParents match {
+ case parent :: _ =>
+ val cls = parent.classSymbol
+ if (cls is Trait) NoSymbol else cls
+ case _ =>
+ NoSymbol
+ }
+
+ /** The denotation is fully completed: all attributes are fully defined.
+ * ClassDenotations compiled from source are first completed, then fully completed.
+ * Packages are never fully completed since members can be added at any time.
+ * @see Namer#ClassCompleter
+ */
+ private def isFullyCompleted(implicit ctx: Context): Boolean = {
+ def isFullyCompletedRef(tp: TypeRef) = tp.denot match {
+ case d: ClassDenotation => d.isFullyCompleted
+ case _ => false
+ }
+ def testFullyCompleted =
+ if (classParents.isEmpty) !is(Package) && symbol.eq(defn.AnyClass)
+ else classParents.forall(isFullyCompletedRef)
+ // The positive answer is cached in the FullyCompleted flag.
+ flagsUNSAFE.is(FullyCompleted) ||
+ isCompleted && testFullyCompleted && { setFlag(FullyCompleted); true }
+ }
+
+ // ------ syncing inheritance-related info -----------------------------
+
+ private var firstRunId: RunId = initRunId
+
+ /** invalidate caches influenced by parent classes if one of the parents
+ * is younger than the denotation itself.
+ */
+ override def syncWithParents(implicit ctx: Context): SingleDenotation = {
+ def isYounger(tref: TypeRef) = tref.symbol.denot match {
+ case denot: ClassDenotation =>
+ if (denot.validFor.runId < ctx.runId) denot.current // syncs with its parents in turn
+ val result = denot.firstRunId > this.firstRunId
+ if (result) incremental.println(s"$denot is younger than $this")
+ result
+ case _ => false
+ }
+ val parentIsYounger = (firstRunId < ctx.runId) && {
+ infoOrCompleter match {
+ case cinfo: ClassInfo => cinfo.classParents exists isYounger
+ case _ => false
+ }
+ }
+ if (parentIsYounger) {
+ incremental.println(s"parents of $this are invalid; symbol id = ${symbol.id}, copying ...\n")
+ invalidateInheritedInfo()
+ }
+ firstRunId = ctx.runId
+ this
+ }
+
+ /** Invalidate all caches and fields that depend on base classes and their contents */
+ override def invalidateInheritedInfo(): Unit = {
+ myBaseClasses = null
+ mySuperClassBits = null
+ myMemberFingerPrint = FingerPrint.unknown
+ myMemberCache = null
+ myMemberCachePeriod = Nowhere
+ memberNamesCache = SimpleMap.Empty
+ }
+
+ // ------ class-specific operations -----------------------------------
+
+ private[this] var myThisType: Type = null
+
+ /** The this-type depends on the kind of class:
+ * - for a package class `p`: ThisType(TypeRef(Noprefix, p))
+ * - for a module class `m`: A term ref to m's source module.
+ * - for all other classes `c` with owner `o`: ThisType(TypeRef(o.thisType, c))
+ */
+ override def thisType(implicit ctx: Context): Type = {
+ if (myThisType == null) myThisType = computeThisType
+ myThisType
+ }
+
+ // NOTE(review): the current implementation ignores the module-class case
+ // described above; the commented-out alternative below would handle it.
+ private def computeThisType(implicit ctx: Context): Type =
+ ThisType.raw(
+ TypeRef(if (this is Package) NoPrefix else owner.thisType, symbol.asType))
+/* else {
+ val pre = owner.thisType
+ if (this is Module)
+ if (isMissing(pre)) TermRef(pre, sourceModule.asTerm)
+ else TermRef.withSig(pre, name.sourceModuleName, Signature.NotAMethod)
+ else ThisType.raw(TypeRef(pre, symbol.asType))
+ }
+*/
+ private[this] var myTypeRef: TypeRef = null
+
+ /** Cached variant of the inherited typeRef computation. */
+ override def typeRef(implicit ctx: Context): TypeRef = {
+ if (myTypeRef == null) myTypeRef = super.typeRef
+ myTypeRef
+ }
+
+ private[this] var myBaseClasses: List[ClassSymbol] = null
+ private[this] var mySuperClassBits: BitSet = null
+
+ /** Invalidate baseTypeRefCache, baseClasses and superClassBits on new run */
+ private def checkBasesUpToDate()(implicit ctx: Context) =
+ if (baseTypeRefValid != ctx.runId) {
+ baseTypeRefCache = new java.util.HashMap[CachedType, Type]
+ myBaseClasses = null
+ mySuperClassBits = null
+ baseTypeRefValid = ctx.runId
+ }
+
+ private def computeBases(implicit ctx: Context): (List[ClassSymbol], BitSet) = {
+ if (myBaseClasses eq Nil) throw CyclicReference(this)
+ myBaseClasses = Nil
+ val seen = new mutable.BitSet
+ val locked = new mutable.BitSet
+ def addBaseClasses(bcs: List[ClassSymbol], to: List[ClassSymbol])
+ : List[ClassSymbol] = bcs match {
+ case bc :: bcs1 =>
+ val bcs1added = addBaseClasses(bcs1, to)
+ val id = bc.superId
+ if (seen contains id) bcs1added
+ else {
+ seen += id
+ bc :: bcs1added
+ }
+ case nil =>
+ to
+ }
+ def addParentBaseClasses(ps: List[Type], to: List[ClassSymbol]): List[ClassSymbol] = ps match {
+ case p :: ps1 =>
+ addParentBaseClasses(ps1, addBaseClasses(p.baseClasses, to))
+ case nil =>
+ to
+ }
+ val bcs = classSymbol :: addParentBaseClasses(classParents, Nil)
+ val scbits = seen.toImmutable
+ if (isFullyCompleted) {
+ myBaseClasses = bcs
+ mySuperClassBits = scbits
+ }
+ else myBaseClasses = null
+ (bcs, scbits)
+ }
+
+ /** A bitset that contains the superId's of all base classes */
+ private def superClassBits(implicit ctx: Context): BitSet =
+ if (classParents.isEmpty) BitSet() // can happen when called too early in Namers
+ else {
+ checkBasesUpToDate()
+ if (mySuperClassBits != null) mySuperClassBits else computeBases._2
+ }
+
+ /** The base classes of this class in linearization order,
+ * with the class itself as first element.
+ */
+ def baseClasses(implicit ctx: Context): List[ClassSymbol] =
+ if (classParents.isEmpty) classSymbol :: Nil // can happen when called too early in Namers
+ else {
+ checkBasesUpToDate()
+ if (myBaseClasses != null) myBaseClasses else computeBases._1
+ }
+
+ /** Does this class derive from `base`? True if `base` is this class itself,
+ * appears in the superclass bitset, or either side is erroneous
+ * (erroneous classes conservatively count as deriving from anything).
+ */
+ final override def derivesFrom(base: Symbol)(implicit ctx: Context): Boolean =
+ !isAbsent &&
+ base.isClass &&
+ ( (symbol eq base)
+ || (superClassBits contains base.superId)
+ || (this is Erroneous)
+ || (base is Erroneous)
+ )
+
+ /** Is this class a subclass of `base`? Like `derivesFrom`, but also
+ * accounts for Nothing being a subclass of everything and Null being a
+ * subclass of everything except Nothing.
+ */
+ final override def isSubClass(base: Symbol)(implicit ctx: Context) =
+ derivesFrom(base) ||
+ base.isClass && (
+ (symbol eq defn.NothingClass) ||
+ (symbol eq defn.NullClass) && (base ne defn.NothingClass))
+
+ final override def typeParamCreationFlags = ClassTypeParamCreationFlags
+
+ // Cached member-name bloom filter; FingerPrint.unknown until computed.
+ private[this] var myMemberFingerPrint: FingerPrint = FingerPrint.unknown
+
+ /** Compute the bloom filter over the names of all members: own declarations
+ * plus the fingerprints of all parents. Fully-completed parents are frozen
+ * so their declaration scopes can no longer change behind the cache.
+ */
+ private def computeMemberFingerPrint(implicit ctx: Context): FingerPrint = {
+ var fp = FingerPrint()
+ var e = info.decls.lastEntry
+ while (e != null) {
+ fp.include(e.name)
+ e = e.prev
+ }
+ var ps = classParents
+ while (ps.nonEmpty) {
+ val parent = ps.head.typeSymbol
+ parent.denot match {
+ case parentDenot: ClassDenotation =>
+ fp.include(parentDenot.memberFingerPrint)
+ if (parentDenot.isFullyCompleted) parentDenot.setFlag(Frozen)
+ case _ =>
+ }
+ ps = ps.tail
+ }
+ fp
+ }
+
+ /** A bloom filter for the names of all members in this class.
+ * Makes sense only for parent classes, and should definitely
+ * not be used for package classes because cache never
+ * gets invalidated.
+ */
+ def memberFingerPrint(implicit ctx: Context): FingerPrint =
+ if (myMemberFingerPrint != FingerPrint.unknown) myMemberFingerPrint
+ else {
+ val fp = computeMemberFingerPrint
+ if (isFullyCompleted) myMemberFingerPrint = fp
+ fp
+ }
+
+ // Per-period LRU cache mapping member names to their pre-denotations;
+ // rebuilt whenever the context period changes.
+ private[this] var myMemberCache: LRUCache[Name, PreDenotation] = null
+ private[this] var myMemberCachePeriod: Period = Nowhere
+
+ private def memberCache(implicit ctx: Context): LRUCache[Name, PreDenotation] = {
+ if (myMemberCachePeriod != ctx.period) {
+ myMemberCache = new LRUCache
+ myMemberCachePeriod = ctx.period
+ }
+ myMemberCache
+ }
+
+ /** Enter a symbol in current scope, and future scopes of same denotation.
+ * Note: We require that this does not happen after the first time
+ * someone does a findMember on a subclass.
+ * @param scope The scope in which symbol should be entered.
+ * If this is EmptyScope, the scope is `decls`.
+ */
+ def enter(sym: Symbol, scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
+ val mscope = scope match {
+ case scope: MutableScope =>
+ // if enter gets a scope as an argument,
+ // then this is a scope that will eventually become decls of this symbol.
+ // And this should only happen if this is first time the scope of symbol
+ // is computed, i.e. the symbol has no future yet.
+ assert(this.nextInRun.validFor.code <= this.validFor.code)
+ scope
+ case _ => unforcedDecls.openForMutations
+ }
+ if (this is PackageClass) {
+ // In package scopes a re-entered name replaces the previous entry.
+ val entry = mscope.lookupEntry(sym.name)
+ if (entry != null) {
+ if (entry.sym == sym) return
+ mscope.unlink(entry)
+ entry.sym.denot = sym.denot // to avoid stale symbols
+ }
+ }
+ enterNoReplace(sym, mscope)
+ // Propagate the entry to the next denotation of this symbol in the run,
+ // so later phases see the symbol too.
+ val nxt = this.nextInRun
+ if (nxt.validFor.code > this.validFor.code) {
+ this.nextInRun.asSymDenotation.asClass.enter(sym)
+ }
+ }
+
+ /** Enter a symbol in given `scope` without potentially replacing the old copy. */
+ def enterNoReplace(sym: Symbol, scope: MutableScope)(implicit ctx: Context): Unit = {
+ def isUsecase = ctx.docCtx.isDefined && sym.name.show.takeRight(4) == "$doc"
+ // A frozen class may not acquire new members, except for a few sanctioned
+ // cases (private members, static annotations, inline accessors, doc usecases).
+ require(
+ (sym.denot.flagsUNSAFE is Private) ||
+ !(this is Frozen) ||
+ (scope ne this.unforcedDecls) ||
+ sym.hasAnnotation(defn.ScalaStaticAnnot) ||
+ sym.name.isInlineAccessor ||
+ isUsecase)
+
+ scope.enter(sym)
+
+ // Keep the name caches consistent with the mutated scope.
+ if (myMemberFingerPrint != FingerPrint.unknown)
+ myMemberFingerPrint.include(sym.name)
+ if (myMemberCache != null)
+ myMemberCache invalidate sym.name
+ }
+
+ /** Replace symbol `prev` (if defined in current class) by symbol `replacement`.
+ * If `prev` is not defined in current class, do nothing.
+ * @pre `prev` and `replacement` have the same name.
+ */
+ def replace(prev: Symbol, replacement: Symbol)(implicit ctx: Context): Unit = {
+ require(!(this is Frozen))
+ unforcedDecls.openForMutations.replace(prev, replacement)
+ if (myMemberCache != null)
+ myMemberCache invalidate replacement.name
+ }
+
+ /** Delete symbol from current scope.
+ * Note: We require that this does not happen after the first time
+ * someone does a findMember on a subclass.
+ */
+ def delete(sym: Symbol)(implicit ctx: Context) = {
+ require(!(this is Frozen))
+ info.decls.openForMutations.unlink(sym)
+ // The fingerprint cannot "un-include" a name, so reset it entirely.
+ myMemberFingerPrint = FingerPrint.unknown
+ if (myMemberCache != null) myMemberCache invalidate sym.name
+ }
+
+ /** Make sure the type parameters of this class appear in the order given
+ * by `typeParams` in the scope of the class. Reorder definitions in scope if necessary.
+ */
+ def ensureTypeParamsInCorrectOrder()(implicit ctx: Context): Unit = {
+ val tparams = typeParams
+ if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) {
+ // Rebuild the scope: type params first (in `typeParams` order), then
+ // all remaining declarations in their original order.
+ val decls = info.decls
+ val decls1 = newScope
+ for (tparam <- typeParams) decls1.enter(decls.lookup(tparam.name))
+ for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym)
+ info = classInfo.derivedClassInfo(decls = decls1)
+ myTypeParams = null
+ }
+ }
+
+ /** All members of this class that have the given name.
+ * The elements of the returned pre-denotation all
+ * have existing symbols.
+ */
+ final def membersNamed(name: Name)(implicit ctx: Context): PreDenotation = {
+ val privates = info.decls.denotsNamed(name, selectPrivate)
+ privates union nonPrivateMembersNamed(name).filterDisjoint(privates)
+ }
+
+ /** All non-private members of this class that have the given name.
+ * The elements of the returned pre-denotation all
+ * have existing symbols.
+ * @param inherited The method is called on a parent class from computeNPMembersNamed
+ */
+ final def nonPrivateMembersNamed(name: Name, inherited: Boolean = false)(implicit ctx: Context): PreDenotation = {
+ Stats.record("nonPrivateMembersNamed")
+ if (Config.cacheMembersNamed) {
+ var denots: PreDenotation = memberCache lookup name
+ if (denots == null) {
+ denots = computeNPMembersNamed(name, inherited)
+ if (isFullyCompleted) memberCache.enter(name, denots)
+ } else if (Config.checkCacheMembersNamed) {
+ // Debug mode: recompute and cross-check against the cached value.
+ val denots1 = computeNPMembersNamed(name, inherited)
+ assert(denots.exists == denots1.exists, s"cache inconsistency: cached: $denots, computed $denots1, name = $name, owner = $this")
+ }
+ denots
+ } else computeNPMembersNamed(name, inherited)
+ }
+
+ /** Compute the non-private members with `name`: own declarations plus the
+ * union over all parents' non-private members of that name. The fingerprint
+ * check lets inherited lookups fail fast when the name cannot be a member.
+ */
+ private[core] def computeNPMembersNamed(name: Name, inherited: Boolean)(implicit ctx: Context): PreDenotation = /*>|>*/ Stats.track("computeNPMembersNamed") /*<|<*/ {
+ if (!inherited ||
+ !Config.useFingerPrints ||
+ (memberFingerPrint contains name)) {
+ Stats.record("computeNPMembersNamed after fingerprint")
+ ensureCompleted()
+ val ownDenots = info.decls.denotsNamed(name, selectNonPrivate)
+ if (debugTrace) // DEBUG
+ println(s"$this.member($name), ownDenots = $ownDenots")
+ def collect(denots: PreDenotation, parents: List[TypeRef]): PreDenotation = parents match {
+ case p :: ps =>
+ val denots1 = collect(denots, ps)
+ p.symbol.denot match {
+ case parentd: ClassDenotation =>
+ denots1 union
+ parentd.nonPrivateMembersNamed(name, inherited = true)
+ .mapInherited(ownDenots, denots1, thisType)
+ case _ =>
+ denots1
+ }
+ case nil =>
+ denots
+ }
+ // Constructors are never inherited, so only own declarations count.
+ if (name.isConstructorName) ownDenots
+ else collect(ownDenots, classParents)
+ } else NoDenotation
+ }
+
+ /** Find the member with `name` as seen from prefix `pre`, dropping members
+ * that have any of the `excluded` flags.
+ */
+ override final def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation = {
+ val raw = if (excluded is Private) nonPrivateMembersNamed(name) else membersNamed(name)
+ raw.filterExcluded(excluded).asSeenFrom(pre).toDenot(pre)
+ }
+
+ // Cache for baseTypeRefOf, valid for one run (see checkBasesUpToDate).
+ // A stored NoPrefix marks an in-progress computation and is used to detect
+ // cyclic references.
+ private[this] var baseTypeRefCache: java.util.HashMap[CachedType, Type] = null
+ private[this] var baseTypeRefValid: RunId = NoRunId
+
+ /** Compute tp.baseTypeRef(this) */
+ final def baseTypeRefOf(tp: Type)(implicit ctx: Context): Type = {
+
+ // Glb of `bt` with the base type refs of all types in `ps`.
+ def foldGlb(bt: Type, ps: List[Type]): Type = ps match {
+ case p :: ps1 => foldGlb(bt & baseTypeRefOf(p), ps1)
+ case _ => bt
+ }
+
+ def inCache(tp: Type) = baseTypeRefCache.containsKey(tp)
+
+ /** We cannot cache:
+ * - type variables which are uninstantiated or whose instances can
+ * change, depending on typerstate.
+ * - types where the underlying type is an ErasedValueType, because
+ * this underlying type will change after ElimErasedValueType,
+ * and this changes subtyping relations. As a shortcut, we do not
+ * cache ErasedValueType at all.
+ */
+ def isCachable(tp: Type): Boolean = tp match {
+ case _: TypeErasure.ErasedValueType => false
+ case tp: TypeRef if tp.symbol.isClass => true
+ case tp: TypeVar => tp.inst.exists && inCache(tp.inst)
+ case tp: TypeProxy => inCache(tp.underlying)
+ case tp: AndOrType => inCache(tp.tp1) && inCache(tp.tp2)
+ case _ => true
+ }
+
+ // Uncached computation, dispatching on the shape of `tp`.
+ def computeBaseTypeRefOf(tp: Type): Type = {
+ Stats.record("computeBaseTypeOf")
+ if (symbol.isStatic && tp.derivesFrom(symbol))
+ symbol.typeRef
+ else tp match {
+ case tp: TypeRef =>
+ val subcls = tp.symbol
+ if (subcls eq symbol)
+ tp
+ else subcls.denot match {
+ case cdenot: ClassDenotation =>
+ if (cdenot.superClassBits contains symbol.superId) foldGlb(NoType, tp.parents)
+ else NoType
+ case _ =>
+ baseTypeRefOf(tp.superType)
+ }
+ case tp: TypeProxy =>
+ baseTypeRefOf(tp.superType)
+ case AndType(tp1, tp2) =>
+ baseTypeRefOf(tp1) & baseTypeRefOf(tp2)
+ case OrType(tp1, tp2) =>
+ baseTypeRefOf(tp1) | baseTypeRefOf(tp2)
+ case JavaArrayType(_) if symbol == defn.ObjectClass =>
+ this.typeRef
+ case _ =>
+ NoType
+ }
+ }
+
+ /*>|>*/ ctx.debugTraceIndented(s"$tp.baseTypeRef($this)") /*<|<*/ {
+ tp match {
+ case tp: CachedType =>
+ checkBasesUpToDate()
+ var basetp = baseTypeRefCache get tp
+ if (basetp == null) {
+ // Mark as in-progress with NoPrefix, then compute; cache or drop
+ // the result depending on cachability.
+ baseTypeRefCache.put(tp, NoPrefix)
+ basetp = computeBaseTypeRefOf(tp)
+ if (isCachable(tp)) baseTypeRefCache.put(tp, basetp)
+ else baseTypeRefCache.remove(tp)
+ } else if (basetp == NoPrefix) {
+ // We re-entered while the computation for `tp` was in progress.
+ baseTypeRefCache.put(tp, null)
+ throw CyclicReference(this)
+ }
+ basetp
+ case _ =>
+ computeBaseTypeRefOf(tp)
+ }
+ }
+ }
+
+ // Cache of member-name sets keyed by the name filter used to compute them.
+ private[this] var memberNamesCache: SimpleMap[NameFilter, Set[Name]] = SimpleMap.Empty
+
+ /** The set of names of all members (own and inherited) accepted by
+ * `keepOnly`. Results are cached per filter, except for package classes
+ * whose member set may still change.
+ */
+ def memberNames(keepOnly: NameFilter)(implicit ctx: Context): Set[Name] = {
+ def computeMemberNames: Set[Name] = {
+ var names = Set[Name]()
+ def maybeAdd(name: Name) = if (keepOnly(thisType, name)) names += name
+ for (p <- classParents)
+ for (name <- p.memberNames(keepOnly, thisType)) maybeAdd(name)
+ val ownSyms =
+ if (keepOnly == implicitFilter)
+ if (this is Package) Iterator.empty
+ else info.decls.iterator filter (_ is Implicit)
+ else info.decls.iterator
+ for (sym <- ownSyms) maybeAdd(sym.name)
+ names
+ }
+ if ((this is PackageClass) || !Config.cacheMemberNames)
+ computeMemberNames // don't cache package member names; they might change
+ else {
+ val cached = memberNamesCache(keepOnly)
+ if (cached != null) cached
+ else {
+ val names = computeMemberNames
+ if (isFullyCompleted) {
+ // Freeze the class so the cached name set cannot go stale.
+ setFlag(Frozen)
+ memberNamesCache = memberNamesCache.updated(keepOnly, names)
+ }
+ names
+ }
+ }
+ }
+
+ // Cache of full names keyed by the separator string used to build them.
+ private[this] var fullNameCache: SimpleMap[String, Name] = SimpleMap.Empty
+ override final def fullNameSeparated(separator: String)(implicit ctx: Context): Name = {
+ val cached = fullNameCache(separator)
+ if (cached != null) cached
+ else {
+ val fn = super.fullNameSeparated(separator)
+ fullNameCache = fullNameCache.updated(separator, fn)
+ fn
+ }
+ }
+
+ // to avoid overloading ambiguities
+ override def fullName(implicit ctx: Context): Name = super.fullName
+
+ override def primaryConstructor(implicit ctx: Context): Symbol = {
+ def constrNamed(cname: TermName) = info.decls.denotsNamed(cname).last.symbol
+ // denotsNamed returns Symbols in reverse order of occurrence
+ if (this.is(ImplClass)) constrNamed(nme.TRAIT_CONSTRUCTOR) // ignore normal constructor
+ else
+ constrNamed(nme.CONSTRUCTOR).orElse(constrNamed(nme.TRAIT_CONSTRUCTOR))
+ }
+
+ /** The parameter accessors of this class. Term and type accessors,
+ * getters and setters are all returned in this list
+ */
+ def paramAccessors(implicit ctx: Context): List[Symbol] =
+ unforcedDecls.filter(_ is ParamAccessor).toList
+
+ /** If this class has the same `decls` scope reference in `phase` and
+ * `phase.next`, install a new denotation with a cloned scope in `phase.next`.
+ */
+ def ensureFreshScopeAfter(phase: DenotTransformer)(implicit ctx: Context): Unit =
+ if (ctx.phaseId != phase.next.id) ensureFreshScopeAfter(phase)(ctx.withPhase(phase.next))
+ else {
+ val prevCtx = ctx.withPhase(phase)
+ val ClassInfo(pre, _, ps, decls, selfInfo) = classInfo
+ if (classInfo(prevCtx).decls eq decls)
+ copySymDenotation(info = ClassInfo(pre, classSymbol, ps, decls.cloneScope, selfInfo))
+ .installAfter(phase)
+ }
+ }
+
+ /** The denotation of a package class.
+ * It overrides ClassDenotation to take account of package objects when looking for members
+ */
+ class PackageClassDenotation private[SymDenotations] (
+ symbol: Symbol,
+ ownerIfExists: Symbol,
+ name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol,
+ initRunId: RunId)
+ extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin, initRunId) {
+
+ // Per-run cache of the package object's denotation.
+ private[this] var packageObjCache: SymDenotation = _
+ private[this] var packageObjRunId: RunId = NoRunId
+
+ /** The package object in this class, if one exists */
+ def packageObj(implicit ctx: Context): SymDenotation = {
+ if (packageObjRunId != ctx.runId) {
+ packageObjRunId = ctx.runId
+ packageObjCache = NoDenotation // break cycle in case we are looking for package object itself
+ packageObjCache = findMember(nme.PACKAGE, thisType, EmptyFlags).asSymDenotation
+ }
+ packageObjCache
+ }
+
+ /** Look first for members in package; if none are found look in package object */
+ override def computeNPMembersNamed(name: Name, inherited: Boolean)(implicit ctx: Context): PreDenotation = {
+ val denots = super.computeNPMembersNamed(name, inherited)
+ if (denots.exists) denots
+ else packageObj.moduleClass.denot match {
+ case pcls: ClassDenotation => pcls.computeNPMembersNamed(name, inherited)
+ case _ => denots
+ }
+ }
+
+ /** The union of the member names of the package and the package object */
+ override def memberNames(keepOnly: NameFilter)(implicit ctx: Context): Set[Name] = {
+ val ownNames = super.memberNames(keepOnly)
+ packageObj.moduleClass.denot match {
+ case pcls: ClassDenotation => ownNames union pcls.memberNames(keepOnly)
+ case _ => ownNames
+ }
+ }
+ }
+
+ /** The denotation that signals "no denotation": it does not exist, is
+ * neither term nor type, has no owner, and is its own as-seen-from image.
+ */
+ class NoDenotation extends SymDenotation(
+ NoSymbol, NoSymbol, "<none>".toTermName, Permanent, NoType) {
+ override def exists = false
+ override def isTerm = false
+ override def isType = false
+ override def owner: Symbol = throw new AssertionError("NoDenotation.owner")
+ override def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = this
+ override def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation = this
+ validFor = Period.allInRun(NoRunId) // will be brought forward automatically
+ }
+
+ /** The single shared NoDenotation instance. */
+ @sharable val NoDenotation = new NoDenotation
+
+ // ---- Completion --------------------------------------------------------
+
+ /** Instances of LazyType are carried by uncompleted symbols.
+ * Note: LazyTypes double up as (constant) functions from Symbol and
+ * from (TermSymbol, ClassSymbol) to LazyType. That way lazy types can be
+ * directly passed to symbol creation methods in Symbols that demand instances
+ * of these function types.
+ */
+ abstract class LazyType extends UncachedGroundType
+ with (Symbol => LazyType)
+ with ((TermSymbol, ClassSymbol) => LazyType) { self =>
+
+ /** Sets all missing fields of given denotation */
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit
+
+ // Constant-function views of this lazy type (see class comment above).
+ def apply(sym: Symbol) = this
+ def apply(module: TermSymbol, modcls: ClassSymbol) = this
+
+ // Mutable slots configured via the `with...` builder methods below.
+ private var myDecls: Scope = EmptyScope
+ private var mySourceModuleFn: Context => Symbol = NoSymbolFn
+ private var myModuleClassFn: Context => Symbol = NoSymbolFn
+
+ /** A proxy to this lazy type that keeps the complete operation
+ * but provides fresh slots for scope/sourceModule/moduleClass
+ */
+ def proxy: LazyType = new LazyType {
+ override def complete(denot: SymDenotation)(implicit ctx: Context) = self.complete(denot)
+ }
+
+ def decls: Scope = myDecls
+ def sourceModule(implicit ctx: Context): Symbol = mySourceModuleFn(ctx)
+ def moduleClass(implicit ctx: Context): Symbol = myModuleClassFn(ctx)
+
+ // Builder-style setters; each returns `this` for chaining.
+ def withDecls(decls: Scope): this.type = { myDecls = decls; this }
+ def withSourceModule(sourceModuleFn: Context => Symbol): this.type = { mySourceModuleFn = sourceModuleFn; this }
+ def withModuleClass(moduleClassFn: Context => Symbol): this.type = { myModuleClassFn = moduleClassFn; this }
+ }
+
+ /** A subclass of LazyTypes where type parameters can be completed independently of
+ * the info.
+ */
+ trait TypeParamsCompleter extends LazyType {
+ /** The type parameters computed by the completer before completion has finished */
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol]
+ }
+
+ /** Constant function returning NoSymbol, the default for the slots above. */
+ val NoSymbolFn = (ctx: Context) => NoSymbol
+
+ /** A missing completer */
+ @sharable class NoCompleter extends LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = unsupported("complete")
+ }
+
+ object NoCompleter extends NoCompleter
+
+ /** A lazy type for modules that points to the module class.
+ * Needed so that `moduleClass` works before completion.
+ * Completion of modules is always completion of the underlying
+ * module class, followed by copying the relevant fields to the module.
+ */
+ class ModuleCompleter(_moduleClass: ClassSymbol) extends LazyType {
+ override def moduleClass(implicit ctx: Context) = _moduleClass
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val from = moduleClass.denot.asClass
+ denot.setFlag(from.flags.toTermFlags & RetainedModuleValFlags)
+ denot.annotations = from.annotations filter (_.appliesToModule)
+ // !!! ^^^ needs to be revised later. The problem is that annotations might
+ // only apply to the module but not to the module class. The right solution
+ // is to have the module class completer set the annotations of both the
+ // class and the module.
+ denot.info = moduleClass.typeRef
+ denot.privateWithin = from.privateWithin
+ }
+ }
+
+ /** A completer for missing references */
+ class StubInfo() extends LazyType {
+
+ // Give the denotation a harmless placeholder info so compilation can
+ // proceed after the error has been reported.
+ def initializeToDefaults(denot: SymDenotation)(implicit ctx: Context) = {
+ denot.info = denot match {
+ case denot: ClassDenotation =>
+ ClassInfo(denot.owner.thisType, denot.classSymbol, Nil, EmptyScope)
+ case _ =>
+ ErrorType
+ }
+ denot.privateWithin = NoSymbol
+ }
+
+ // Report a "bad symbolic reference" error, then fall back to defaults.
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val sym = denot.symbol
+ val file = sym.associatedFile
+ val (location, src) =
+ if (file != null) (s" in $file", file.toString)
+ else ("", "the signature")
+ val name = ctx.fresh.setSetting(ctx.settings.debugNames, true).nameString(denot.name)
+ ctx.error(
+ i"""bad symbolic reference. A signature$location
+ |refers to $name in ${denot.owner.showKind} ${denot.owner.showFullName} which is not available.
+ |It may be completely missing from the current classpath, or the version on
+ |the classpath might be incompatible with the version used when compiling $src.""")
+ if (ctx.debug) throw new Error()
+ initializeToDefaults(denot)
+ }
+ }
+
+ // ---- Fingerprints -----------------------------------------------------
+
+ /** A fingerprint is a bitset that acts as a bloom filter for sets
+ * of names. Implemented as a value class over the backing `Array[Long]`;
+ * `include` mutates that shared array in place.
+ */
+ class FingerPrint(val bits: Array[Long]) extends AnyVal {
+ import FingerPrint._
+
+ /** Include some bits of name's hashcode in set */
+ def include(name: Name): Unit = {
+ val hash = name.hashCode & Mask
+ bits(hash >> WordSizeLog) |= (1L << hash)
+ }
+
+ /** Include all bits of `that` fingerprint in set */
+ def include(that: FingerPrint): Unit =
+ for (i <- 0 until NumWords) bits(i) |= that.bits(i)
+
+ /** Does set contain hash bits of given name?
+ * May report false positives but never false negatives (bloom filter).
+ */
+ def contains(name: Name): Boolean = {
+ val hash = name.hashCode & Mask
+ (bits(hash >> WordSizeLog) & (1L << hash)) != 0
+ }
+ }
+
+ object FingerPrint {
+ def apply() = new FingerPrint(new Array[Long](NumWords))
+ // Sentinel with a null backing array; compare with `!=`/`==` only,
+ // never call include/contains on it.
+ val unknown = new FingerPrint(null)
+ private final val WordSizeLog = 6
+ private final val NumWords = 32
+ private final val NumBits = NumWords << WordSizeLog
+ private final val Mask = NumBits - 1
+ }
+
+ private val AccessorOrLabel = Accessor | Label
+
+ @sharable private var indent = 0 // for completions printing
+}
diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
new file mode 100644
index 000000000..4ae28c10b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
@@ -0,0 +1,267 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools
+package dotc
+package core
+
+import java.io.IOException
+import scala.compat.Platform.currentTime
+import dotty.tools.io.{ ClassPath, AbstractFile }
+import Contexts._, Symbols._, Flags._, SymDenotations._, Types._, Scopes._, util.Positions._, Names._
+import StdNames._, NameOps._
+import Decorators.{StringDecorator, StringInterpolators}
+import classfile.ClassfileParser
+import scala.util.control.NonFatal
+
+object SymbolLoaders {
+ /** A marker trait for a completer that replaces the original
+ * Symbol loader for an unpickled root.
+ */
+ trait SecondCompleter
+}
+
+/** A base class for Symbol loaders with some overridable behavior */
+class SymbolLoaders {
+
+ /** Enter `member` into `scope` (or into `owner`'s declarations if scope is
+ * EmptyScope), asserting that no symbol of that name is present yet.
+ */
+ protected def enterNew(
+ owner: Symbol, member: Symbol,
+ completer: SymbolLoader, scope: Scope = EmptyScope)(implicit ctx: Context): Symbol = {
+ assert(scope.lookup(member.name) == NoSymbol, s"${owner.fullName}.${member.name} already has a symbol")
+ owner.asClass.enter(member, scope)
+ member
+ }
+
+ /** Enter class with given `name` into scope of `owner`.
+ */
+ def enterClass(
+ owner: Symbol, name: PreName, completer: SymbolLoader,
+ flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(implicit ctx: Context): Symbol = {
+ val cls = ctx.newClassSymbol(owner, name.toTypeName, flags, completer, assocFile = completer.sourceFileOrNull)
+ enterNew(owner, cls, completer, scope)
+ }
+
+ /** Enter module with given `name` into scope of `owner`.
+ * Both the module value and its module class are entered.
+ */
+ def enterModule(
+ owner: Symbol, name: PreName, completer: SymbolLoader,
+ modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(implicit ctx: Context): Symbol = {
+ val module = ctx.newModuleSymbol(
+ owner, name.toTermName, modFlags, clsFlags,
+ (module, _) => completer.proxy withDecls newScope withSourceModule (_ => module),
+ assocFile = completer.sourceFileOrNull)
+ enterNew(owner, module, completer, scope)
+ enterNew(owner, module.moduleClass, completer, scope)
+ }
+
+ /** Enter package with given `name` into scope of `owner`,
+ * resolving any clash with a pre-existing object of the same name.
+ */
+ def enterPackage(owner: Symbol, pkg: ClassPath)(implicit ctx: Context): Symbol = {
+ val pname = pkg.name.toTermName
+ val preExisting = owner.info.decls lookup pname
+ if (preExisting != NoSymbol) {
+ // Some jars (often, obfuscated ones) include a package and
+ // object with the same name. Rather than render them unusable,
+ // offer a setting to resolve the conflict one way or the other.
+ // This was motivated by the desire to use YourKit probes, which
+ // require yjp.jar at runtime. See SI-2089.
+ if (ctx.settings.termConflict.isDefault)
+ throw new TypeError(
+ i"""$owner contains object and package with same name: $pname
+ |one of them needs to be removed from classpath""")
+ else if (ctx.settings.termConflict.value == "package") {
+ ctx.warning(
+ s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. The object will be inaccessible.")
+ owner.asClass.delete(preExisting)
+ } else {
+ ctx.warning(
+ s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. The package will be inaccessible.")
+ return NoSymbol
+ }
+ }
+ ctx.newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags,
+ (module, modcls) => new PackageLoader(module, pkg)).entered
+ }
+
+ /** Enter class and module with given `name` into scope of `owner`
+ * and give them `completer` as type.
+ */
+ def enterClassAndModule(
+ owner: Symbol, name: PreName, completer: SymbolLoader,
+ flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
+ // NOTE(review): `clazz` and `module` are unused; kept for symmetry and
+ // potential debugging.
+ val clazz = enterClass(owner, name, completer, flags, scope)
+ val module = enterModule(
+ owner, name, completer,
+ modFlags = flags.toTermFlags & RetainedModuleValFlags,
+ clsFlags = flags.toTypeFlags & RetainedModuleClassFlags,
+ scope = scope)
+ }
+
+ /** In batch mode: Enter class and module with given `name` into scope of `owner`
+ * and give them a source completer for given `src` as type.
+ * In IDE mode: Find all toplevel definitions in `src` and enter them into scope of `owner`
+ * with source completer for given `src` as type.
+ * (overridden in interactive.Global).
+ */
+ def enterToplevelsFromSource(
+ owner: Symbol, name: PreName, src: AbstractFile,
+ scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
+ enterClassAndModule(owner, name, new SourcefileLoader(src), scope = scope)
+ }
+
+ /** The package objects of scala and scala.reflect should always
+ * be loaded in binary if classfiles are available, even if sourcefiles
+ * are newer. Late-compiling these objects from source leads to compilation
+ * order issues.
+ * Note: We do a name-base comparison here because the method is called before we even
+ * have ReflectPackage defined.
+ */
+ def binaryOnly(owner: Symbol, name: String)(implicit ctx: Context): Boolean =
+ name == "package" &&
+ (owner.fullName.toString == "scala" || owner.fullName.toString == "scala.reflect")
+
+ /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
+ */
+ def initializeFromClassPath(owner: Symbol, classRep: ClassPath#ClassRep)(implicit ctx: Context): Unit = {
+ ((classRep.binary, classRep.source): @unchecked) match {
+ case (Some(bin), Some(src)) if needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
+ if (ctx.settings.verbose.value) ctx.inform("[symloader] picked up newer source file for " + src.path)
+ enterToplevelsFromSource(owner, classRep.name, src)
+ case (None, Some(src)) =>
+ if (ctx.settings.verbose.value) ctx.inform("[symloader] no class, picked up source file for " + src.path)
+ enterToplevelsFromSource(owner, classRep.name, src)
+ case (Some(bin), _) =>
+ enterClassAndModule(owner, classRep.name, ctx.platform.newClassLoader(bin))
+ }
+ }
+
+ /** Is the source file newer than (or as new as) the binary, so it needs recompiling? */
+ def needCompile(bin: AbstractFile, src: AbstractFile) =
+ src.lastModified >= bin.lastModified
+
+ /** Load contents of a package
+ */
+ class PackageLoader(_sourceModule: TermSymbol, classpath: ClassPath)
+ extends SymbolLoader {
+ override def sourceModule(implicit ctx: Context) = _sourceModule
+ def description = "package loader " + classpath.name
+
+ private[core] val currentDecls: MutableScope = newScope
+
+ def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = {
+ assert(root is PackageClass, root)
+ // Heuristic: classfile names ending in '$' are (probably) module classes.
+ def maybeModuleClass(classRep: ClassPath#ClassRep) = classRep.name.last == '$'
+ val pre = root.owner.thisType
+ root.info = ClassInfo(pre, root.symbol.asClass, Nil, currentDecls, pre select sourceModule)
+ if (!sourceModule.isCompleted)
+ sourceModule.completer.complete(sourceModule)
+ if (!root.isRoot) {
+ // Enter plain classes first, then module classes that have no
+ // corresponding plain class entered yet.
+ for (classRep <- classpath.classes)
+ if (!maybeModuleClass(classRep))
+ initializeFromClassPath(root.symbol, classRep)
+ for (classRep <- classpath.classes)
+ if (maybeModuleClass(classRep) && !root.unforcedDecls.lookup(classRep.name.toTypeName).exists)
+ initializeFromClassPath(root.symbol, classRep)
+ }
+ if (!root.isEmptyPackage)
+ for (pkg <- classpath.packages)
+ enterPackage(root.symbol, pkg)
+ }
+ }
+}
+
+/** A lazy type that completes itself by calling parameter doComplete.
+ * Any linked modules/classes or module classes are also initialized.
+ */
+abstract class SymbolLoader extends LazyType {
+
+ /** Load source or class file for `root`, filling in its denotation. */
+ def doComplete(root: SymDenotation)(implicit ctx: Context): Unit
+
+ /** The file this loader reads from, or null if none. */
+ def sourceFileOrNull: AbstractFile = null
+
+ /** Description of the resource (ClassPath, AbstractFile)
+ * being processed by this loader
+ */
+ def description: String
+
+ override def complete(root: SymDenotation)(implicit ctx: Context): Unit = {
+ def signalError(ex: Exception): Unit = {
+ if (ctx.debug) ex.printStackTrace()
+ val msg = ex.getMessage()
+ ctx.error(
+ if (msg eq null) "i/o error while loading " + root.name
+ else "error while loading " + root.name + ",\n " + msg)
+ }
+ try {
+ val start = currentTime
+ if (ctx.settings.debugTrace.value)
+ ctx.doTraceIndented(s">>>> loading ${root.debugString}", _ => s"<<<< loaded ${root.debugString}") {
+ doComplete(root)
+ }
+ else
+ doComplete(root)
+ ctx.informTime("loaded " + description, start)
+ } catch {
+ case ex: IOException =>
+ signalError(ex)
+ case NonFatal(ex) =>
+ println(s"exception caught when loading $root: $ex")
+ throw ex
+ } finally {
+ // Whatever happened, mark roots that did not complete as absent, unless
+ // they were taken over by a SecondCompleter (e.g. an unpickler).
+ def postProcess(denot: SymDenotation) =
+ if (!denot.isCompleted &&
+ !denot.completer.isInstanceOf[SymbolLoaders.SecondCompleter])
+ denot.markAbsent()
+ postProcess(root)
+ if (!root.isRoot)
+ postProcess(root.scalacLinkedClass.denot)
+ }
+ }
+}
+
+/** A loader that completes a root denotation by parsing a class file. */
+class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
+
+ override def sourceFileOrNull: AbstractFile = classfile
+
+ def description = "class file " + classfile.toString
+
+ /** The (class, module class) denotation pair for `rootDenot`, creating a
+ * dummy companion denotation if the real one is shadowed (see below).
+ */
+ def rootDenots(rootDenot: ClassDenotation)(implicit ctx: Context): (ClassDenotation, ClassDenotation) = {
+ val linkedDenot = rootDenot.scalacLinkedClass.denot match {
+ case d: ClassDenotation => d
+ case d =>
+ // this can happen if the companion is shadowed by a val or type
+ // in a package object; in this case, we make up some dummy denotation
+ // as a stand in for loading.
+ // An example for this situation is scala.reflect.Manifest, which exists
+ // as a class in scala.reflect and as a val in scala.reflect.package.
+ if (rootDenot is ModuleClass)
+ ctx.newClassSymbol(
+ rootDenot.owner, rootDenot.name.stripModuleClassSuffix.asTypeName, Synthetic,
+ _ => NoType).classDenot
+ else
+ ctx.newModuleSymbol(
+ rootDenot.owner, rootDenot.name.toTermName, Synthetic, Synthetic,
+ (module, _) => new NoCompleter() withDecls newScope withSourceModule (_ => module))
+ .moduleClass.denot.asClass
+ }
+ if (rootDenot is ModuleClass) (linkedDenot, rootDenot)
+ else (rootDenot, linkedDenot)
+ }
+
+ override def doComplete(root: SymDenotation)(implicit ctx: Context): Unit =
+ load(root)
+
+ def load(root: SymDenotation)(implicit ctx: Context): Option[ClassfileParser.Embedded] = {
+ val (classRoot, moduleRoot) = rootDenots(root.asClass)
+ new ClassfileParser(classfile, classRoot, moduleRoot)(ctx).run()
+ }
+}
+
+/** A loader for roots backed by a source file. Completion is not supported
+ * here; sources are compiled through the regular pipeline instead.
+ */
+class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
+ def description = "source file " + srcfile.toString
+ override def sourceFileOrNull = srcfile
+ def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = unsupported("doComplete")
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala
new file mode 100644
index 000000000..b5bd196d2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala
@@ -0,0 +1,602 @@
+package dotty.tools
+package dotc
+package core
+
+import Periods._
+import Names._
+import Scopes._
+import Flags._
+import java.lang.AssertionError
+import Decorators._
+import Symbols._
+import Contexts._
+import SymDenotations._
+import printing.Texts._
+import printing.Printer
+import Types._
+import Annotations._
+import util.Positions._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import ast.tpd.Tree
+import ast.TreeTypeMap
+import Constants.Constant
+import Denotations.{ Denotation, SingleDenotation, MultiDenotation }
+import collection.mutable
+import io.AbstractFile
+import language.implicitConversions
+import util.{NoSource, DotClass}
+
+/** Creation methods for symbols */
+trait Symbols { this: Context =>
+
+// ---- Factory methods for symbol creation ----------------------
+//
+// All symbol creations should be done via the next two methods.
+
+ /** Create a symbol without a denotation.
+ * Note this uses a cast instead of a direct type refinement because
+ * it's debug-friendlier not to create an anonymous class here.
+ * The caller is responsible for assigning `denot` before the symbol is used.
+ */
+ def newNakedSymbol[N <: Name](coord: Coord = NoCoord)(implicit ctx: Context): Symbol { type ThisName = N } =
+ new Symbol(coord, ctx.nextId).asInstanceOf[Symbol { type ThisName = N }]
+
+ /** Create a class symbol without a denotation.
+ * As for `newNakedSymbol`, the caller must assign `denot` before use.
+ */
+ def newNakedClassSymbol(coord: Coord = NoCoord, assocFile: AbstractFile = null)(implicit ctx: Context) =
+ new ClassSymbol(coord, assocFile, ctx.nextId)
+
+// ---- Symbol creation methods ----------------------------------
+
+ /** Create a symbol from its fields (info may be lazy).
+ * The denotation is created eagerly; laziness, if any, lives inside `info`
+ * (typically a LazyType completer).
+ */
+ def newSymbol[N <: Name](
+ owner: Symbol,
+ name: N,
+ flags: FlagSet,
+ info: Type,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord): Symbol { type ThisName = N } = {
+ val sym = newNakedSymbol[N](coord)
+ val denot = SymDenotation(sym, owner, name, flags, info, privateWithin)
+ sym.denot = denot
+ sym
+ }
+
+ /** Create a class symbol from a function producing its denotation.
+ * `denotFn` receives the naked class symbol so the denotation can refer back to it.
+ */
+ def newClassSymbolDenoting(denotFn: ClassSymbol => SymDenotation, coord: Coord = NoCoord, assocFile: AbstractFile = null): ClassSymbol = {
+ val cls = newNakedClassSymbol(coord, assocFile)
+ cls.denot = denotFn(cls)
+ cls
+ }
+
+ /** Create a class symbol from its non-info fields and a function
+ * producing its info (the produced info may be lazy).
+ */
+ def newClassSymbol(
+ owner: Symbol,
+ name: TypeName,
+ flags: FlagSet,
+ infoFn: ClassSymbol => Type,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): ClassSymbol
+ = {
+ val cls = newNakedClassSymbol(coord, assocFile)
+ // infoFn is applied to the naked symbol so the info can mention the class itself.
+ val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin)
+ cls.denot = denot
+ cls
+ }
+
+ /** Create a class symbol from its non-info fields and the fields of its info.
+ * "Complete" because the resulting ClassInfo is fully specified, not lazy.
+ */
+ def newCompleteClassSymbol(
+ owner: Symbol,
+ name: TypeName,
+ flags: FlagSet,
+ parents: List[TypeRef],
+ decls: Scope = newScope,
+ selfInfo: Type = NoType,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): ClassSymbol =
+ newClassSymbol(
+ owner, name, flags,
+ ClassInfo(owner.thisType, _, parents, decls, selfInfo),
+ privateWithin, coord, assocFile)
+
+ /** Same as `newCompleteClassSymbol` except that `parents` can be a list of arbitrary
+ * types which get normalized into type refs and parameter bindings.
+ * Normalization is deferred via a LazyType completer because it needs the
+ * class symbol itself.
+ */
+ def newNormalizedClassSymbol(
+ owner: Symbol,
+ name: TypeName,
+ flags: FlagSet,
+ parentTypes: List[Type],
+ decls: Scope = newScope,
+ selfInfo: Type = NoType,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): ClassSymbol = {
+ def completer = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val cls = denot.asClass.classSymbol
+ val decls = newScope
+ // NOTE(review): the `decls`/`selfInfo` parameters are not used here; the
+ // completer builds a fresh scope and omits selfInfo — confirm this is intended.
+ val parentRefs: List[TypeRef] = normalizeToClassRefs(parentTypes, cls, decls)
+ denot.info = ClassInfo(owner.thisType, cls, parentRefs, decls)
+ }
+ }
+ newClassSymbol(owner, name, flags, completer, privateWithin, coord, assocFile)
+ }
+
+ /** Create a module symbol with associated module class
+ * from its non-info fields and a function producing the info
+ * of the module class (this info may be lazy).
+ *
+ * Ordering matters: the class denotation is created first so that the module
+ * denotation's info can either reference the completed class directly or fall
+ * back to a ModuleCompleter that forces it on demand.
+ */
+ def newModuleSymbol(
+ owner: Symbol,
+ name: TermName,
+ modFlags: FlagSet,
+ clsFlags: FlagSet,
+ infoFn: (TermSymbol, ClassSymbol) => Type, // typically a ModuleClassCompleterWithDecls
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): TermSymbol
+ = {
+ val module = newNakedSymbol[TermName](coord)
+ val modcls = newNakedClassSymbol(coord, assocFile)
+ val modclsFlags = clsFlags | ModuleClassCreationFlags
+ val modclsName = name.toTypeName.adjustIfModuleClass(modclsFlags)
+ val cdenot = SymDenotation(
+ modcls, owner, modclsName, modclsFlags,
+ infoFn(module, modcls), privateWithin)
+ val mdenot = SymDenotation(
+ module, owner, name, modFlags | ModuleCreationFlags,
+ if (cdenot.isCompleted) TypeRef.withSymAndName(owner.thisType, modcls, modclsName)
+ else new ModuleCompleter(modcls))
+ module.denot = mdenot
+ modcls.denot = cdenot
+ module
+ }
+
+ /** Create a module symbol with associated module class
+ * from its non-info fields and the fields of the module class info.
+ * @param flags The combined flags of the module and the module class
+ * These are masked with RetainedModuleValFlags/RetainedModuleClassFlags.
+ */
+ def newCompleteModuleSymbol(
+ owner: Symbol,
+ name: TermName,
+ modFlags: FlagSet,
+ clsFlags: FlagSet,
+ parents: List[TypeRef],
+ decls: Scope,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): TermSymbol =
+ newModuleSymbol(
+ owner, name, modFlags, clsFlags,
+ // The self type of a module class is a TermRef to the module value itself.
+ (module, modcls) => ClassInfo(
+ owner.thisType, modcls, parents, decls, TermRef.withSymAndName(owner.thisType, module, name)),
+ privateWithin, coord, assocFile)
+
+ // Flags given to synthesized companion-accessor methods.
+ val companionMethodFlags = Flags.Synthetic | Flags.Private | Flags.Method
+
+ /** A synthetic method on `owner` whose result type points at `target`,
+ * used to link companions. Reuses an existing declaration of the same
+ * name if present; returns NoSymbol when either side is missing or absent.
+ */
+ def synthesizeCompanionMethod(name: Name, target: SymDenotation, owner: SymDenotation)(implicit ctx: Context) =
+ if (owner.exists && target.exists && !owner.isAbsent && !target.isAbsent) {
+ val existing = owner.unforcedDecls.lookup(name)
+
+ existing.orElse{
+ ctx.newSymbol(owner.symbol, name, companionMethodFlags , ExprType(target.typeRef))
+ }
+ } else NoSymbol
+
+ /** Create a package symbol with associated package class
+ * from its non-info fields and a lazy type for loading the package's members.
+ */
+ def newPackageSymbol(
+ owner: Symbol,
+ name: TermName,
+ infoFn: (TermSymbol, ClassSymbol) => LazyType): TermSymbol =
+ newModuleSymbol(owner, name, PackageCreationFlags, PackageCreationFlags, infoFn)
+
+ /** Create a package symbol with associated package class
+ * from its non-info fields and its member scope.
+ */
+ def newCompletePackageSymbol(
+ owner: Symbol,
+ name: TermName,
+ modFlags: FlagSet = EmptyFlags,
+ clsFlags: FlagSet = EmptyFlags,
+ decls: Scope = newScope): TermSymbol =
+ newCompleteModuleSymbol(
+ owner, name,
+ modFlags | PackageCreationFlags, clsFlags | PackageCreationFlags,
+ Nil, decls)
+
+
+ /** Create a stub symbol that will issue a missing reference error
+ * when attempted to be completed.
+ *
+ * A term name yields a stub module, a type name a stub class; the stub is
+ * also recorded in `stubs` for later diagnostics.
+ * @param file the (class)file containing the unresolvable reference, if known.
+ */
+ def newStubSymbol(owner: Symbol, name: Name, file: AbstractFile = null): Symbol = {
+ def stubCompleter = new StubInfo()
+ // A module val's members live in its module class, so normalize to that.
+ val normalizedOwner = if (owner is ModuleVal) owner.moduleClass else owner
+ //if (base.settings.debug.value) throw new Error()
+ val stub = name match {
+ case name: TermName =>
+ newModuleSymbol(normalizedOwner, name, EmptyFlags, EmptyFlags, stubCompleter, assocFile = file)
+ case name: TypeName =>
+ newClassSymbol(normalizedOwner, name, EmptyFlags, stubCompleter, assocFile = file)
+ }
+ stubs = stub :: stubs
+ stub
+ }
+
+ /** Create the local template dummy of given class `cls`.
+ * In a template
+ *
+ * trait T { val fld: Int; { val x: int = 2 }; val fld2 = { val y = 2; y }}
+ *
+ * the owner of `x` is the local dummy of the template. The owner of the local
+ * dummy is then the class of the template itself. By contrast, the owner of `y`
+ * would be `fld2`. There is a single local dummy per template.
+ *
+ * @param coord position/index coordinates for the dummy (previously accepted
+ * but silently dropped; now forwarded to the created symbol).
+ */
+ def newLocalDummy(cls: Symbol, coord: Coord = NoCoord) =
+ newSymbol(cls, nme.localDummyName(cls), EmptyFlags, NoType, coord = coord)
+
+ /** Create an import symbol pointing back to given qualifier `expr`.
+ * The qualifier tree is retained inside the symbol's ImportType info.
+ */
+ def newImportSymbol(owner: Symbol, expr: Tree, coord: Coord = NoCoord) =
+ newSymbol(owner, nme.IMPORT, EmptyFlags, ImportType(expr), coord = coord)
+
+ /** Create a class constructor symbol for given class `cls`.
+ * The constructor's result type is the class's own type ref.
+ */
+ def newConstructor(cls: ClassSymbol, flags: FlagSet, paramNames: List[TermName], paramTypes: List[Type], privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord) =
+ newSymbol(cls, nme.CONSTRUCTOR, flags | Method, MethodType(paramNames, paramTypes)(_ => cls.typeRef), privateWithin, coord)
+
+ /** The parameterless, flagless default constructor of class `cls`. */
+ def newDefaultConstructor(cls: ClassSymbol) =
+ newConstructor(cls, flags = EmptyFlags, paramNames = Nil, paramTypes = Nil)
+
+ /** Create a symbol representing a selftype declaration for class `cls`.
+ * Falls back to the class's declared self type when `selfInfo` is NoType.
+ */
+ def newSelfSym(cls: ClassSymbol, name: TermName = nme.WILDCARD, selfInfo: Type = NoType): TermSymbol =
+ ctx.newSymbol(cls, name, SelfSymFlags, selfInfo orElse cls.classInfo.selfType, coord = cls.coord)
+
+ /** Create new type parameters with given owner, names, and flags.
+ * @param boundsFn A function that, given type refs to the newly created
+ * parameters returns a list of their bounds.
+ * Done in two passes because the bounds may refer to any of the
+ * parameters: naked symbols plus type refs first, denotations second.
+ */
+ def newTypeParams(
+ owner: Symbol,
+ names: List[TypeName],
+ flags: FlagSet,
+ boundsFn: List[TypeRef] => List[Type]): List[TypeSymbol] = {
+
+ val tparamBuf = new mutable.ListBuffer[TypeSymbol]
+ val trefBuf = new mutable.ListBuffer[TypeRef]
+ for (name <- names) {
+ val tparam = newNakedSymbol[TypeName](NoCoord)
+ tparamBuf += tparam
+ trefBuf += TypeRef.withSymAndName(owner.thisType, tparam, name)
+ }
+ val tparams = tparamBuf.toList
+ val bounds = boundsFn(trefBuf.toList)
+ for ((name, tparam, bound) <- (names, tparams, bounds).zipped)
+ tparam.denot = SymDenotation(tparam, owner, name, flags | owner.typeParamCreationFlags, bound)
+ tparams
+ }
+
+ /** Create a new skolem symbol. This is not the same as SkolemType, even though the
+ * motivation (create a singleton referencing to a type) is similar.
+ * Skolems are owned by the root class and marked Permanent.
+ */
+ def newSkolem(tp: Type) = newSymbol(defn.RootClass, nme.SKOLEM, SyntheticArtifact | Permanent, tp)
+
+ /** A synthetic error symbol; a type name gets an alias of ErrorType,
+ * a term name gets ErrorType itself.
+ */
+ def newErrorSymbol(owner: Symbol, name: Name) =
+ newSymbol(owner, name, SyntheticArtifact,
+ if (name.isTypeName) TypeAlias(ErrorType) else ErrorType)
+
+ /** Map given symbols, subjecting their attributes to the mappings
+ * defined in the given TreeTypeMap `ttmap`.
+ * Cross symbol references are brought over from originals to copies.
+ * Do not copy any symbols if all attributes of all symbols stay the same.
+ */
+ def mapSymbols(originals: List[Symbol], ttmap: TreeTypeMap, mapAlways: Boolean = false): List[Symbol] =
+ if (originals.forall(sym =>
+ (ttmap.mapType(sym.info) eq sym.info) &&
+ !(ttmap.oldOwners contains sym.owner)) && !mapAlways)
+ originals
+ else {
+ // First pass: allocate naked copies so a substitution can be set up.
+ val copies: List[Symbol] = for (original <- originals) yield
+ original match {
+ case original: ClassSymbol =>
+ newNakedClassSymbol(original.coord, original.assocFile)
+ case _ =>
+ newNakedSymbol[original.ThisName](original.coord)
+ }
+ val ttmap1 = ttmap.withSubstitution(originals, copies)
+ (originals, copies).zipped foreach {(original, copy) =>
+ copy.denot = original.denot // preliminary denotation, so that we can access symbols in subsequent transform
+ }
+ // Second pass: compute the real denotations, mapping infos, owners,
+ // privateWithin and annotations through the extended substitution.
+ (originals, copies).zipped foreach {(original, copy) =>
+ val odenot = original.denot
+ val oinfo = original.info match {
+ case ClassInfo(pre, _, parents, decls, selfInfo) =>
+ assert(original.isClass)
+ ClassInfo(pre, copy.asClass, parents, decls.cloneScope, selfInfo)
+ case oinfo => oinfo
+ }
+ copy.denot = odenot.copySymDenotation(
+ symbol = copy,
+ owner = ttmap1.mapOwner(odenot.owner),
+ initFlags = odenot.flags &~ Frozen | Fresh,
+ info = ttmap1.mapType(oinfo),
+ privateWithin = ttmap1.mapOwner(odenot.privateWithin), // since this refers to outer symbols, need not include copies (from->to) in ownermap here.
+ annotations = odenot.annotations.mapConserve(ttmap1.apply))
+ }
+ copies
+ }
+
+// ----- Locating predefined symbols ----------------------------------------
+
+ /** The package with the given fully-qualified path; fails if absent. */
+ def requiredPackage(path: PreName): TermSymbol =
+ base.staticRef(path.toTermName).requiredSymbol(_ is Package).asTerm
+
+ def requiredPackageRef(path: PreName): TermRef = requiredPackage(path).termRef
+
+ /** The class with the given fully-qualified path; fails if absent. */
+ def requiredClass(path: PreName): ClassSymbol =
+ base.staticRef(path.toTypeName).requiredSymbol(_.isClass).asClass
+
+ def requiredClassRef(path: PreName): TypeRef = requiredClass(path).typeRef
+
+ /** Get ClassSymbol if class is either defined in current compilation run
+ * or present on classpath.
+ * Returns NoSymbol otherwise. */
+ def getClassIfDefined(path: PreName): Symbol =
+ base.staticRef(path.toTypeName, generateStubs = false).requiredSymbol(_.isClass, generateStubs = false)
+
+ /** The module with the given fully-qualified path; fails if absent. */
+ def requiredModule(path: PreName): TermSymbol =
+ base.staticRef(path.toTermName).requiredSymbol(_ is Module).asTerm
+
+ def requiredModuleRef(path: PreName): TermRef = requiredModule(path).termRef
+}
+
+object Symbols {
+
+ implicit def eqSymbol: Eq[Symbol, Symbol] = Eq
+
+ /** A Symbol represents a Scala definition/declaration or a package.
+ * @param coord The coordinates of the symbol (a position or an index)
+ * @param id A unique identifier of the symbol (unique per ContextBase)
+ */
+ class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with TypeParamInfo with printing.Showable {
+
+ type ThisName <: Name
+
+ //assert(id != 4285)
+
+ /** The last denotation of this symbol, cached to avoid repeated
+ * period-based lookups. May be stale for the current period.
+ */
+ private[this] var lastDenot: SymDenotation = _
+
+ /** Set the denotation of this symbol */
+ private[core] def denot_=(d: SymDenotation) =
+ lastDenot = d
+
+ /** The current denotation of this symbol.
+ * Refreshes and re-caches `lastDenot` when it is not valid for the
+ * context's current period.
+ */
+ final def denot(implicit ctx: Context): SymDenotation = {
+ var denot = lastDenot
+ if (!(denot.validFor contains ctx.period)) {
+ denot = denot.current.asInstanceOf[SymDenotation]
+ lastDenot = denot
+ }
+ denot
+ }
+
+ // The run in which the cached denotation is valid, or NoRunId for a naked symbol.
+ private[core] def defRunId: RunId =
+ if (lastDenot == null) NoRunId else lastDenot.validFor.runId
+
+ /** Does this symbol come from a currently compiled source file? */
+ final def isDefinedInCurrentRun(implicit ctx: Context): Boolean = {
+ // Requires a source position: classfile-loaded symbols have no position.
+ pos.exists && defRunId == ctx.runId
+ }
+
+ /** Subclass tests and casts.
+ * The is-tests take a fast path: when the cached denotation stems from the
+ * current run, term/type-hood cannot have changed, so no refresh is needed.
+ */
+ final def isTerm(implicit ctx: Context): Boolean =
+ (if (defRunId == ctx.runId) lastDenot else denot).isTerm
+
+ final def isType(implicit ctx: Context): Boolean =
+ (if (defRunId == ctx.runId) lastDenot else denot).isType
+
+ final def isClass: Boolean = isInstanceOf[ClassSymbol]
+
+ final def asTerm(implicit ctx: Context): TermSymbol = { assert(isTerm, s"asTerm called on not-a-Term $this" ); asInstanceOf[TermSymbol] }
+ final def asType(implicit ctx: Context): TypeSymbol = { assert(isType, s"asType called on not-a-Type $this"); asInstanceOf[TypeSymbol] }
+ final def asClass: ClassSymbol = asInstanceOf[ClassSymbol]
+
+ // A freshly copied symbol (see mapSymbols) carries the Fresh flag.
+ final def isFresh(implicit ctx: Context) =
+ lastDenot != null && (lastDenot is Fresh)
+
+ /** Special cased here, because it may be used on naked symbols in substituters */
+ final def isStatic(implicit ctx: Context): Boolean =
+ lastDenot != null && denot.isStatic
+
+ /** A unique, densely packed integer tag for each class symbol, -1
+ * for all other symbols. To save memory, this method
+ * should be called only if class is a super class of some other class.
+ */
+ def superId(implicit ctx: Context): Int = -1
+
+ /** This symbol entered into owner's scope (owner must be a class).
+ * A module value also enters its module class alongside itself.
+ */
+ final def entered(implicit ctx: Context): this.type = {
+ assert(this.owner.isClass, s"symbol ($this) entered the scope of non-class owner ${this.owner}") // !!! DEBUG
+ this.owner.asClass.enter(this)
+ if (this.is(Module, butNot = Package)) this.owner.asClass.enter(this.moduleClass)
+ this
+ }
+
+ /** Enter this symbol in its class owner after given `phase`. Create a fresh
+ * denotation for its owner class if the class has not yet already one
+ * that starts being valid after `phase`.
+ * @pre Symbol is a class member
+ */
+ def enteredAfter(phase: DenotTransformer)(implicit ctx: Context): this.type =
+ if (ctx.phaseId != phase.next.id) enteredAfter(phase)(ctx.withPhase(phase.next))
+ else {
+ if (this.owner.is(Package)) {
+ // Package scopes are shared across phases, so just widen validity instead.
+ denot.validFor |= InitialPeriod
+ if (this is Module) this.moduleClass.validFor |= InitialPeriod
+ }
+ else this.owner.asClass.ensureFreshScopeAfter(phase)
+ entered
+ }
+
+ /** This symbol, if it exists, otherwise the result of evaluating `that` */
+ def orElse(that: => Symbol)(implicit ctx: Context) =
+ if (this.exists) this else that
+
+ /** If this symbol satisfies predicate `p` this symbol, otherwise `NoSymbol` */
+ def filter(p: Symbol => Boolean): Symbol = if (p(this)) this else NoSymbol
+
+ /** The current name of this symbol */
+ final def name(implicit ctx: Context): ThisName = denot.name.asInstanceOf[ThisName]
+
+ /** The source or class file from which this class or
+ * the class containing this symbol was generated, null if not applicable.
+ * Overridden in ClassSymbol
+ */
+ def associatedFile(implicit ctx: Context): AbstractFile =
+ denot.topLevelClass.symbol.associatedFile
+
+ /** The class file from which this class was generated, null if not applicable. */
+ final def binaryFile(implicit ctx: Context): AbstractFile = {
+ val file = associatedFile
+ // Discriminates class files from source files by the path suffix.
+ if (file != null && file.path.endsWith("class")) file else null
+ }
+
+ /** The source file from which this class was generated, null if not applicable. */
+ final def sourceFile(implicit ctx: Context): AbstractFile = {
+ val file = associatedFile
+ if (file != null && !file.path.endsWith("class")) file
+ // Fall back to a @SourceFile annotation, if the class carries one.
+ else denot.topLevelClass.getAnnotation(defn.SourceFileAnnot) match {
+ case Some(sourceAnnot) => sourceAnnot.argumentConstant(0) match {
+ case Some(Constant(path: String)) => AbstractFile.getFile(path)
+ case none => null
+ }
+ case none => null
+ }
+ }
+
+ /** The position of this symbol, or NoPosition if symbol was not loaded
+ * from source.
+ */
+ def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition
+
+ // TypeParamInfo methods: delegate to the current denotation.
+ def isTypeParam(implicit ctx: Context) = denot.is(TypeParam)
+ def paramName(implicit ctx: Context) = name.asTypeName
+ def paramBounds(implicit ctx: Context) = denot.info.bounds
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = denot.infoOrCompleter
+ def paramVariance(implicit ctx: Context) = denot.variance
+ def paramRef(implicit ctx: Context) = denot.typeRef
+
+// -------- Printing --------------------------------------------------------
+
+ /** The prefix string to be used when displaying this symbol without denotation */
+ protected def prefixString = "Symbol"
+
+ override def toString: String =
+ if (lastDenot == null) s"Naked$prefixString#$id"
+ else lastDenot.toString// + "#" + id // !!! DEBUG
+
+ def toText(printer: Printer): Text = printer.toText(this)
+
+ // Various display forms, all delegating to the context's printer machinery.
+ def showLocated(implicit ctx: Context): String = ctx.locatedText(this).show
+ def showExtendedLocation(implicit ctx: Context): String = ctx.extendedLocationText(this).show
+ def showDcl(implicit ctx: Context): String = ctx.dclText(this).show
+ def showKind(implicit ctx: Context): String = ctx.kindString(this)
+ def showName(implicit ctx: Context): String = ctx.nameString(this)
+ def showFullName(implicit ctx: Context): String = ctx.fullNameString(this)
+
+ override def hashCode() = id // for debugging.
+ }
+
+ type TermSymbol = Symbol { type ThisName = TermName }
+ type TypeSymbol = Symbol { type ThisName = TypeName }
+
+ /** A symbol that is known to denote a class, with its associated file
+ * (source or classfile) attached directly.
+ */
+ class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile, id: Int)
+ extends Symbol(coord, id) {
+
+ type ThisName = TypeName
+
+ /** The source or class file from which this class was generated, null if not applicable. */
+ override def associatedFile(implicit ctx: Context): AbstractFile =
+ if (assocFile != null || (this.owner is PackageClass) || this.isEffectiveRoot) assocFile
+ else super.associatedFile
+
+ final def classDenot(implicit ctx: Context): ClassDenotation =
+ denot.asInstanceOf[ClassDenotation]
+
+ // Cache of the last superId returned, to avoid the map lookup below.
+ private var superIdHint: Int = -1
+
+ override def superId(implicit ctx: Context): Int = {
+ val hint = superIdHint
+ // The hint is only trusted if the context still maps it back to this class.
+ if (hint >= 0 && hint <= ctx.lastSuperId && (ctx.classOfId(hint) eq this))
+ hint
+ else {
+ val id = ctx.superIdOfClass get this match {
+ case Some(id) =>
+ id
+ case None =>
+ val id = ctx.nextSuperId
+ ctx.superIdOfClass(this) = id
+ ctx.classOfId(id) = this
+ id
+ }
+ superIdHint = id
+ id
+ }
+ }
+
+ override protected def prefixString = "ClassSymbol"
+ }
+
+ /** A stand-in for a symbol that could not be resolved; shares the
+ * underlying symbol's denotation. `msg` explains the failure (by name,
+ * so it is only computed on demand).
+ */
+ class ErrorSymbol(val underlying: Symbol, msg: => String)(implicit ctx: Context) extends Symbol(NoCoord, ctx.nextId) {
+ type ThisName = underlying.ThisName
+ denot = underlying.denot
+ }
+
+ /** The non-existent symbol; its denotation is NoDenotation and its id is 0. */
+ @sharable object NoSymbol extends Symbol(NoCoord, 0) {
+ denot = NoDenotation
+
+ override def associatedFile(implicit ctx: Context): AbstractFile = NoSource.file
+ }
+
+ implicit class Copier[N <: Name](sym: Symbol { type ThisName = N })(implicit ctx: Context) {
+ /** Copy a symbol, overriding selective fields.
+ * Dispatches to the class or non-class factory depending on the
+ * copied symbol's kind; defaults reproduce the original's attributes.
+ */
+ def copy(
+ owner: Symbol = sym.owner,
+ name: N = sym.name,
+ flags: FlagSet = sym.flags,
+ info: Type = sym.info,
+ privateWithin: Symbol = sym.privateWithin,
+ coord: Coord = sym.coord,
+ associatedFile: AbstractFile = sym.associatedFile): Symbol =
+ if (sym.isClass)
+ ctx.newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord, associatedFile)
+ else
+ ctx.newSymbol(owner, name, flags, info, privateWithin, coord)
+ }
+
+ /** Makes all denotation operations available on symbols */
+ implicit def toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation = sym.denot
+
+ /** Makes all class denotations available on class symbols */
+ implicit def toClassDenot(cls: ClassSymbol)(implicit ctx: Context): ClassDenotation = cls.classDenot
+
+ /** The Definitions object */
+ def defn(implicit ctx: Context): Definitions = ctx.definitions
+
+ /** The current class */
+ def currentClass(implicit ctx: Context): ClassSymbol = ctx.owner.enclosingClass.asClass
+
+ // NOTE(review): unsynchronized mutable global, updated by newStubSymbol;
+ // marked @sharable but not thread-safe — confirm single-threaded use.
+ @sharable var stubs: List[Symbol] = Nil // diagnostic only
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
new file mode 100644
index 000000000..70819e590
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -0,0 +1,688 @@
+package dotty.tools.dotc
+package core
+
+import Types._
+import Contexts._
+import Symbols._
+import SymDenotations.{LazyType, TypeParamsCompleter}
+import Decorators._
+import util.Stats._
+import util.common._
+import Names._
+import NameOps._
+import Flags._
+import StdNames.tpnme
+import util.Positions.Position
+import config.Printers.core
+import collection.mutable
+import dotty.tools.dotc.config.Config
+import java.util.NoSuchElementException
+
+object TypeApplications {
+
+ /** Assert type is not a TypeBounds instance and return it unchanged */
+ val noBounds = (tp: Type) => tp match {
+ case tp: TypeBounds => throw new AssertionError("no TypeBounds allowed")
+ case _ => tp
+ }
+
+ /** If `tp` is a TypeBounds instance return its lower bound else return `tp` */
+ val boundsToLo = (tp: Type) => tp match {
+ case tp: TypeBounds => tp.lo
+ case _ => tp
+ }
+
+ /** If `tp` is a TypeBounds instance return its upper bound else return `tp` */
+ val boundsToHi = (tp: Type) => tp match {
+ case tp: TypeBounds => tp.hi
+ case _ => tp
+ }
+
+ /** Does variance `v1` conform to variance `v2`?
+ * This is the case if the variances are the same or `sym` is nonvariant.
+ */
+ def varianceConforms(v1: Int, v2: Int): Boolean =
+ v1 == v2 || v2 == 0
+
+ /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`?
+ */
+ def varianceConforms(tparam1: TypeParamInfo, tparam2: TypeParamInfo)(implicit ctx: Context): Boolean =
+ varianceConforms(tparam1.paramVariance, tparam2.paramVariance)
+
+ /** Do the variances of type parameters `tparams1` conform to the variances
+ * of corresponding type parameters `tparams2`?
+ * This is only the case if `tparams1` and `tparams2` have the same length.
+ */
+ def variancesConform(tparams1: List[TypeParamInfo], tparams2: List[TypeParamInfo])(implicit ctx: Context): Boolean =
+ tparams1.corresponds(tparams2)(varianceConforms)
+
+ /** Extractor for
+ *
+ * [v1 X1: B1, ..., vn Xn: Bn] -> C[X1, ..., Xn]
+ *
+ * where v1, ..., vn and B1, ..., Bn are the variances and bounds of the type parameters
+ * of the class C.
+ *
+ * @param tycon C
+ */
+ object EtaExpansion {
+ def apply(tycon: Type)(implicit ctx: Context) = {
+ assert(tycon.typeParams.nonEmpty, tycon)
+ tycon.EtaExpand(tycon.typeParamSymbols)
+ }
+
+ // Matches only when the lambda's arguments are exactly its own parameters in order.
+ def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = tp match {
+ case tp @ PolyType(tparams, AppliedType(fn: TypeRef, args)) if (args == tparams.map(_.toArg)) => Some(fn)
+ case _ => None
+ }
+ }
+
+ /** Extractor for type application T[U_1, ..., U_n]. This is the refined type
+ *
+ * T { type p_1 v_1= U_1; ...; type p_n v_n= U_n }
+ *
+ * where v_i, p_i are the variances and names of the type parameters of T.
+ */
+ object AppliedType {
+ def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args)
+
+ def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match {
+ case tp: RefinedType =>
+ // Peel off the chain of refinements down to the underlying type constructor.
+ var refinements: List[RefinedType] = Nil
+ var tycon = tp.stripTypeVar
+ while (tycon.isInstanceOf[RefinedType]) {
+ val rt = tycon.asInstanceOf[RefinedType]
+ refinements = rt :: refinements
+ tycon = rt.parent.stripTypeVar
+ }
+ // Succeeds only if refinements match the constructor's parameters
+ // exactly, in order, with none left over on either side.
+ def collectArgs(tparams: List[TypeParamInfo],
+ refinements: List[RefinedType],
+ argBuf: mutable.ListBuffer[Type]): Option[(Type, List[Type])] = refinements match {
+ case Nil if tparams.isEmpty && argBuf.nonEmpty =>
+ Some((tycon, argBuf.toList))
+ case RefinedType(_, rname, rinfo) :: refinements1
+ if tparams.nonEmpty && rname == tparams.head.paramName =>
+ collectArgs(tparams.tail, refinements1, argBuf += rinfo.argInfo)
+ case _ =>
+ None
+ }
+ collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type])
+ case HKApply(tycon, args) =>
+ Some((tycon, args))
+ case _ =>
+ None
+ }
+ }
+
+ /** Adapt all arguments to possible higher-kinded type parameters using etaExpandIfHK
+ */
+ def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] =
+ if (tparams.isEmpty) args
+ else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramBoundsOrCompleter))
+
+ /** A type map that tries to reduce (part of) the result type of the type lambda `tycon`
+ * with the given `args`(some of which are wildcard arguments represented by type bounds).
+ * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument
+ * `>: L <: H` is substituted for a type lambda parameter `X` only under certain conditions.
+ *
+ * 1. If Mode.AllowLambdaWildcardApply is set:
+ * The wildcard argument is substituted only if `X` appears in a toplevel refinement of the form
+ *
+ * { type A = X }
+ *
+ * and there are no other occurrences of `X` in the reduced type. In that case
+ * the refinement above is replaced by
+ *
+ * { type A >: L <: U }
+ *
+ * The `allReplaced` field indicates whether all occurrences of type lambda parameters
+ * in the reduced type have been replaced with arguments.
+ *
+ * 2. If Mode.AllowLambdaWildcardApply is not set:
+ * All refinements of the form
+ *
+ * { type A = X }
+ *
+ * are replaced by:
+ *
+ * { type A >: L <: U }
+ *
+ * Any other occurrence of `X` in `tycon` is replaced by `U`, if the
+ * occurrence of `X` in `tycon` is covariant, or nonvariant, or by `L`,
+ * if the occurrence is contravariant.
+ *
+ * The idea is that the `AllowLambdaWildcardApply` mode is used to check whether
+ * a type can be soundly reduced, and to give an error or warning if that
+ * is not the case. By contrast, the default mode, with `AllowLambdaWildcardApply`
+ * not set, reduces all applications even if this yields a different type, so
+ * its postcondition is that no type parameters of `tycon` appear in the
+ * result type. Using this mode, we can guarantee that `appliedTo` will never
+ * produce a higher-kinded application with a type lambda as type constructor.
+ */
+ class Reducer(tycon: PolyType, args: List[Type])(implicit ctx: Context) extends TypeMap {
+ // Parameter indices whose wildcard argument may still be substituted (case 1 above).
+ private var available = (0 until args.length).toSet
+ var allReplaced = true
+ def hasWildcardArg(p: PolyParam) =
+ p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds]
+ def canReduceWildcard(p: PolyParam) =
+ !ctx.mode.is(Mode.AllowLambdaWildcardApply) || available.contains(p.paramNum)
+ def apply(t: Type) = t match {
+ case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && canReduceWildcard(p) =>
+ // Substituting here consumes the wildcard: it must not appear elsewhere.
+ available -= p.paramNum
+ args(p.paramNum)
+ case p: PolyParam if p.binder == tycon =>
+ args(p.paramNum) match {
+ case TypeBounds(lo, hi) =>
+ if (ctx.mode.is(Mode.AllowLambdaWildcardApply)) { allReplaced = false; p }
+ else if (variance < 0) lo
+ else hi
+ case arg =>
+ arg
+ }
+ case _: TypeBounds | _: HKApply =>
+ // Below a bounds or application node we are no longer at a toplevel
+ // refinement, so wildcard substitution is disabled while mapping over it.
+ val saved = available
+ available = Set()
+ try mapOver(t)
+ finally available = saved
+ case _ =>
+ mapOver(t)
+ }
+ }
+}
+
+import TypeApplications._
+
+/** A decorator that provides methods for modeling type application */
+class TypeApplications(val self: Type) extends AnyVal {
+
+ /** The type parameters of this type are:
+ * For a ClassInfo type, the type parameters of its class.
+ * For a typeref referring to a class, the type parameters of the class.
+ * For a typeref referring to a Lambda class, the type parameters of
+ * its right hand side or upper bound.
+ * For a refinement type, the type parameters of its parent, dropping
+ * any type parameter that is-rebound by the refinement. "Re-bind" means:
+ * The refinement contains a TypeAlias for the type parameter, or
+ * it introduces bounds for the type parameter, and we are not in the
+ * special case of a type Lambda, where a LambdaTrait gets refined
+ * with the bounds on its hk args. See `LambdaAbstract`, where these
+ * types get introduced, and see `isBoundedLambda` below for the test.
+ */
+ final def typeParams(implicit ctx: Context): List[TypeParamInfo] = /*>|>*/ track("typeParams") /*<|<*/ {
+ self match {
+ case self: ClassInfo =>
+ self.cls.typeParams
+ case self: PolyType =>
+ self.typeParams
+ case self: TypeRef =>
+ val tsym = self.symbol
+ if (tsym.isClass) tsym.typeParams
+ // Avoid a cyclic reference while the symbol is being completed.
+ else if (!tsym.isCompleting) tsym.info.typeParams
+ else Nil
+ case self: RefinedType =>
+ self.parent.typeParams.filterNot(_.paramName == self.refinedName)
+ case self: RecType =>
+ self.parent.typeParams
+ case _: SingletonType =>
+ Nil
+ case self: WildcardType =>
+ self.optBounds.typeParams
+ case self: TypeProxy =>
+ self.superType.typeParams
+ case _ =>
+ Nil
+ }
+ }
+
+ /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */
+ final def hkTypeParams(implicit ctx: Context): List[TypeParamInfo] =
+ if (isHK) typeParams else Nil
+
+ /** If `self` is a generic class, its type parameter symbols, otherwise Nil.
+ * Checking the head suffices because parameter infos are either all
+ * symbols (class) or all non-symbols; the assert guards that invariant.
+ */
+ final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = typeParams match {
+ case (_: Symbol) :: _ =>
+ assert(typeParams.forall(_.isInstanceOf[Symbol]))
+ typeParams.asInstanceOf[List[TypeSymbol]]
+ case _ => Nil
+ }
+
+ /** The named type parameters declared or inherited by this type.
+ * These are all uninstantiated named type parameters of this type or one
+ * of its base types.
+ */
+ final def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = self match {
+ case self: ClassInfo =>
+ self.cls.namedTypeParams
+ case self: RefinedType =>
+ self.parent.namedTypeParams.filterNot(_.name == self.refinedName)
+ case self: SingletonType =>
+ Set()
+ case self: TypeProxy =>
+ self.underlying.namedTypeParams
+ case _ =>
+ Set()
+ }
+
+ /** The smallest supertype of this type that instantiated none of the named type parameters
+ * in `params`. That is, for each named type parameter `p` in `params`, either there is
+ * no type field named `p` in this type, or `p` is a named type parameter of this type.
+ * The first case is important for the recursive case of AndTypes, because some of their operands might
+ * be missing the named parameter altogether, but the AndType as a whole can still
+ * contain it.
+ */
+ final def widenToNamedTypeParams(params: Set[TypeSymbol])(implicit ctx: Context): Type = {
+
+ /** Is widening not needed for `tp`? */
+ def isOK(tp: Type) = {
+ val ownParams = tp.namedTypeParams
+ def isMissingOrOpen(param: TypeSymbol) = {
+ val ownParam = tp.nonPrivateMember(param.name).symbol
+ !ownParam.exists || ownParams.contains(ownParam.asType)
+ }
+ params.forall(isMissingOrOpen)
+ }
+
+ /** Widen type by forming the intersection of its widened parents */
+ def widenToParents(tp: Type) = {
+ val parents = tp.parents.map(p =>
+ tp.baseTypeWithArgs(p.symbol).widenToNamedTypeParams(params))
+ parents.reduceLeft(ctx.typeComparer.andType(_, _))
+ }
+
+ if (isOK(self)) self
+ else self match {
+ case self @ AppliedType(tycon, args) if !isOK(tycon) =>
+ widenToParents(self)
+ case self: TypeRef if self.symbol.isClass =>
+ widenToParents(self)
+ case self: RefinedType =>
+ val parent1 = self.parent.widenToNamedTypeParams(params)
+ if (params.exists(_.name == self.refinedName)) parent1
+ else self.derivedRefinedType(parent1, self.refinedName, self.refinedInfo)
+ case self: TypeProxy =>
+ self.superType.widenToNamedTypeParams(params)
+ case self: AndOrType =>
+ self.derivedAndOrType(
+ self.tp1.widenToNamedTypeParams(params), self.tp2.widenToNamedTypeParams(params))
+ }
+ }
+
+ /** Is self type higher-kinded (i.e. of kind != "*")? */
+ def isHK(implicit ctx: Context): Boolean = self.dealias match {
+ case self: TypeRef => self.info.isHK
+ case self: RefinedType => false
+ case self: PolyType => true
+ case self: SingletonType => false
+ case self: TypeVar =>
+ // Using `origin` instead of `underlying`, as is done for typeParams,
+ // avoids having to set ephemeral in some cases.
+ self.origin.isHK
+ case self: WildcardType => self.optBounds.isHK
+ case self: TypeProxy => self.superType.isHK
+ case _ => false
+ }
+
+ /** Dealias type if it can be done without forcing the TypeRef's info */
+ def safeDealias(implicit ctx: Context): Type = self match {
+ case self: TypeRef if self.denot.exists && self.symbol.isAliasType =>
+ self.superType.stripTypeVar.safeDealias
+ case _ =>
+ self
+ }
+
+ /** Lambda abstract `self` with given type parameters. Examples:
+ *
+ * type T[X] = U becomes type T = [X] -> U
+ * type T[X] >: L <: U becomes type T >: L <: ([X] -> U)
+ *
+ * TODO: Handle parameterized lower bounds
+ */
+ def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = {
+ def expand(tp: Type) =
+ PolyType(
+ tparams.map(_.paramName), tparams.map(_.paramVariance))(
+ tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tl.lifted(tparams, tp))
+ if (tparams.isEmpty) self
+ else self match {
+ case self: TypeAlias =>
+ self.derivedTypeAlias(expand(self.alias))
+ case self @ TypeBounds(lo, hi) =>
+ self.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else expand(lo),
+ expand(hi))
+ case _ => expand(self)
+ }
+ }
+
+ /** Convert a type constructor `TC` which has type parameters `T1, ..., Tn`
+ * in a context where type parameters `U1,...,Un` are expected to
+ *
+ * LambdaXYZ { Apply = TC[hk$0, ..., hk$n] }
+ *
+ * Here, XYZ corresponds to the variances of
+ * - `U1,...,Un` if the variances of `T1,...,Tn` are pairwise compatible with `U1,...,Un`,
+ * - `T1,...,Tn` otherwise.
+ * v1 is compatible with v2, if v1 = v2 or v2 is non-variant.
+ */
+ def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = {
+ val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParamSymbols
+ self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse)
+ //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}")
+ }
+
+ /** If self is not higher-kinded, eta expand it. */
+ def ensureHK(implicit ctx: Context): Type =
+ if (isHK) self else EtaExpansion(self)
+
+ /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */
+ def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = {
+ val hkParams = bound.hkTypeParams
+ if (hkParams.isEmpty) self
+ else self match {
+ case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length =>
+ EtaExpansion(self)
+ case _ => self
+ }
+ }
+
+ /** If argument A and type parameter P are higher-kinded, adapt the variances
+ * of A to those of P, ensuring that the variances of the type lambda A
+ * agree with the variances of corresponding higher-kinded type parameters of P. Example:
+ *
+ * class GenericCompanion[+CC[X]]
+ * GenericCompanion[List]
+ *
+ * with adaptHkVariances, the argument `List` will expand to
+ *
+ * [X] => List[X]
+ *
+ * instead of
+ *
+ * [+X] => List[X]
+ *
+ * even though `List` is covariant. This adaptation is necessary to ignore conflicting
+ * variances in overriding members that have types of hk-type parameters such as
+ * `GenericCompanion[GenTraversable]` or `GenericCompanion[ListBuffer]`.
+ * When checking overriding, we need to validate the subtype relationship
+ *
+ * GenericCompanion[[X] -> ListBuffer[X]] <: GenericCompanion[[+X] -> GenTraversable[X]]
+ *
+ * Without adaptation, this would be false, and hence an overriding error would
+ * result. But with adaptation, the rhs argument will be adapted to
+ *
+ * [X] -> GenTraversable[X]
+ *
+ * which makes the subtype test succeed. The crucial point here is that, since
+ * GenericCompanion only expects a non-variant CC, the fact that GenTraversable
+ * is covariant is irrelevant, so can be ignored.
+ */
+ def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = {
+ val hkParams = bound.hkTypeParams
+ if (hkParams.isEmpty) self
+ else {
+ def adaptArg(arg: Type): Type = arg match {
+ case arg @ PolyType(tparams, body) if
+ !tparams.corresponds(hkParams)(_.paramVariance == _.paramVariance) &&
+ tparams.corresponds(hkParams)(varianceConforms) =>
+ PolyType(tparams.map(_.paramName), hkParams.map(_.paramVariance))(
+ tl => arg.paramBounds.map(_.subst(arg, tl).bounds),
+ tl => arg.resultType.subst(arg, tl)
+ )
+ case arg @ TypeAlias(alias) =>
+ arg.derivedTypeAlias(adaptArg(alias))
+ case arg @ TypeBounds(lo, hi) =>
+ arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi))
+ case _ =>
+ arg
+ }
+ adaptArg(self)
+ }
+ }
+
+ /** The type representing
+ *
+ * T[U1, ..., Un]
+ *
+ * where
+ * @param self = `T`
+ * @param args = `U1,...,Un`
+ */
+ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ {
+ val typParams = self.typeParams
+ def matchParams(t: Type, tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): Type = args match {
+ case arg :: args1 =>
+ try {
+ val tparam :: tparams1 = tparams
+ matchParams(RefinedType(t, tparam.paramName, arg.toBounds(tparam)), tparams1, args1)
+ } catch {
+ case ex: MatchError =>
+ println(s"applied type mismatch: $self with underlying ${self.underlyingIfProxy}, args = $args, typeParams = $typParams") // !!! DEBUG
+ //println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}")
+ throw ex
+ }
+ case nil => t
+ }
+ val stripped = self.stripTypeVar
+ val dealiased = stripped.safeDealias
+ if (args.isEmpty || ctx.erasedTypes) self
+ else dealiased match {
+ case dealiased: PolyType =>
+ def tryReduce =
+ if (!args.exists(_.isInstanceOf[TypeBounds])) {
+ val followAlias = Config.simplifyApplications && {
+ dealiased.resType match {
+ case AppliedType(tyconBody, _) =>
+ variancesConform(typParams, tyconBody.typeParams)
+ // Reducing is safe for type inference, as kind of type constructor does not change
+ case _ => false
+ }
+ }
+ if ((dealiased eq stripped) || followAlias) dealiased.instantiate(args)
+ else HKApply(self, args)
+ }
+ else dealiased.resType match {
+ case AppliedType(tycon, args1) if tycon.safeDealias ne tycon =>
+ // In this case we should always dealias since we cannot handle
+ // higher-kinded applications to wildcard arguments.
+ dealiased
+ .derivedPolyType(resType = tycon.safeDealias.appliedTo(args1))
+ .appliedTo(args)
+ case _ =>
+ val reducer = new Reducer(dealiased, args)
+ val reduced = reducer(dealiased.resType)
+ if (reducer.allReplaced) reduced
+ else HKApply(dealiased, args)
+ }
+ tryReduce
+ case dealiased: AndOrType =>
+ dealiased.derivedAndOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args))
+ case dealiased: TypeAlias =>
+ dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args))
+ case dealiased: TypeBounds =>
+ dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), dealiased.hi.appliedTo(args))
+ case dealiased: LazyRef =>
+ LazyRef(() => dealiased.ref.appliedTo(args))
+ case dealiased: WildcardType =>
+ dealiased
+ case dealiased: TypeRef if dealiased.symbol == defn.NothingClass =>
+ dealiased
+ case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] =>
+ HKApply(self, args)
+ case dealiased =>
+ matchParams(dealiased, typParams, args)
+ }
+ }
+
+ final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil)
+ final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil)
+
+ final def applyIfParameterized(args: List[Type])(implicit ctx: Context): Type =
+ if (typeParams.nonEmpty) appliedTo(args) else self
+
+ /** A cycle-safe version of `appliedTo` where computing type parameters do not force
+ * the typeconstructor. Instead, if the type constructor is completing, we make
+ * up hk type parameters matching the arguments. This is needed when unpickling
+ * Scala2 files such as `scala.collection.generic.Mapfactory`.
+ */
+ final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = self match {
+ case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting =>
+ HKApply(self, args)
+ case _ =>
+ appliedTo(args)
+ }
+
+ /** Turn this type, which is used as an argument for
+ * type parameter `tparam`, into a TypeBounds RHS
+ */
+ final def toBounds(tparam: TypeParamInfo)(implicit ctx: Context): TypeBounds = self match {
+ case self: TypeBounds => // this can happen for wildcard args
+ self
+ case _ =>
+ val v = tparam.paramVariance
+ /* Not neeeded.
+ if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self)
+ else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self)
+ else
+ */
+ TypeAlias(self, v)
+ }
+
+ /** The type arguments of this type's base type instance wrt. `base`.
+ * Existential types in arguments are returned as TypeBounds instances.
+ */
+ final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] =
+ if (self derivesFrom base)
+ self.dealias match {
+ case self: TypeRef if !self.symbol.isClass => self.superType.baseArgInfos(base)
+ case self: HKApply => self.superType.baseArgInfos(base)
+ case _ => base.typeParams.map(param => self.member(param.name).info.argInfo)
+ }
+ else
+ Nil
+
+ /** The type arguments of this type's base type instance wrt.`base`.
+ * Existential types in arguments are disallowed.
+ */
+ final def baseArgTypes(base: Symbol)(implicit ctx: Context): List[Type] =
+ baseArgInfos(base) mapConserve noBounds
+
+ /** The type arguments of this type's base type instance wrt.`base`.
+ * Existential types in arguments are approximated by their lower bound.
+ */
+ final def baseArgTypesLo(base: Symbol)(implicit ctx: Context): List[Type] =
+ baseArgInfos(base) mapConserve boundsToLo
+
+ /** The type arguments of this type's base type instance wrt.`base`.
+ * Existential types in arguments are approximated by their upper bound.
+ */
+ final def baseArgTypesHi(base: Symbol)(implicit ctx: Context): List[Type] =
+ baseArgInfos(base) mapConserve boundsToHi
+
+ /** The base type including all type arguments and applicable refinements
+ * of this type. Refinements are applicable if they refine a member of
+ * the parent type which furthermore is not a name-mangled type parameter.
+ * Existential types in arguments are returned as TypeBounds instances.
+ */
+ final def baseTypeWithArgs(base: Symbol)(implicit ctx: Context): Type = ctx.traceIndented(s"btwa ${self.show} wrt $base", core, show = true) {
+ def default = self.baseTypeRef(base).appliedTo(baseArgInfos(base))
+ self match {
+ case tp: TypeRef =>
+ tp.info match {
+ case TypeBounds(_, hi) => hi.baseTypeWithArgs(base)
+ case _ => default
+ }
+ case tp @ RefinedType(parent, name, _) if !tp.member(name).symbol.is(ExpandedTypeParam) =>
+ tp.wrapIfMember(parent.baseTypeWithArgs(base))
+ case tp: TermRef =>
+ tp.underlying.baseTypeWithArgs(base)
+ case tp: HKApply =>
+ tp.superType.baseTypeWithArgs(base)
+ case AndType(tp1, tp2) =>
+ tp1.baseTypeWithArgs(base) & tp2.baseTypeWithArgs(base)
+ case OrType(tp1, tp2) =>
+ tp1.baseTypeWithArgs(base) | tp2.baseTypeWithArgs(base)
+ case _ =>
+ default
+ }
+ }
+
+ /** Translate a type of the form From[T] to To[T], keep other types as they are.
+ * `from` and `to` must be static classes, both with one type parameter, and the same variance.
+ * Do the same for by name types => From[T] and => To[T]
+ */
+ def translateParameterized(from: ClassSymbol, to: ClassSymbol)(implicit ctx: Context): Type = self match {
+ case self @ ExprType(tp) =>
+ self.derivedExprType(tp.translateParameterized(from, to))
+ case _ =>
+ if (self.derivesFrom(from))
+ if (ctx.erasedTypes) to.typeRef
+ else RefinedType(to.typeRef, to.typeParams.head.name, self.member(from.typeParams.head.name).info)
+ else self
+ }
+
+ /** If this is repeated parameter type, its underlying Seq type,
+ * or, if isJava is true, Array type, else the type itself.
+ */
+ def underlyingIfRepeated(isJava: Boolean)(implicit ctx: Context): Type =
+ if (self.isRepeatedParam) {
+ val seqClass = if (isJava) defn.ArrayClass else defn.SeqClass
+ translateParameterized(defn.RepeatedParamClass, seqClass)
+ }
+ else self
+
+ /** If this is an encoding of a (partially) applied type, return its arguments,
+ * otherwise return Nil.
+ * Existential types in arguments are returned as TypeBounds instances.
+ */
+ final def argInfos(implicit ctx: Context): List[Type] = self match {
+ case AppliedType(tycon, args) => args
+ case _ => Nil
+ }
+
+ /** Argument types where existential types in arguments are disallowed */
+ def argTypes(implicit ctx: Context) = argInfos mapConserve noBounds
+
+ /** Argument types where existential types in arguments are approximated by their lower bound */
+ def argTypesLo(implicit ctx: Context) = argInfos mapConserve boundsToLo
+
+ /** Argument types where existential types in arguments are approximated by their upper bound */
+ def argTypesHi(implicit ctx: Context) = argInfos mapConserve boundsToHi
+
+ /** The core type without any type arguments.
+ * @param `typeArgs` must be the type arguments of this type.
+ */
+ final def withoutArgs(typeArgs: List[Type]): Type = self match {
+ case HKApply(tycon, args) => tycon
+ case _ =>
+ typeArgs match {
+ case _ :: typeArgs1 =>
+ val RefinedType(tycon, _, _) = self
+ tycon.withoutArgs(typeArgs1)
+ case nil =>
+ self
+ }
+ }
+
+ /** If this is the image of a type argument; recover the type argument,
+ * otherwise NoType.
+ */
+ final def argInfo(implicit ctx: Context): Type = self match {
+ case self: TypeAlias => self.alias
+ case self: TypeBounds => self
+ case _ => NoType
+ }
+
+ /** If this is a type alias, its underlying type, otherwise the type itself */
+ def dropAlias(implicit ctx: Context): Type = self match {
+ case TypeAlias(alias) => alias
+ case _ => self
+ }
+
+ /** The element type of a sequence or array */
+ def elemType(implicit ctx: Context): Type = self match {
+ case defn.ArrayOf(elemtp) => elemtp
+ case JavaArrayType(elemtp) => elemtp
+ case _ => baseArgInfos(defn.SeqClass).headOption.getOrElse(NoType)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
new file mode 100644
index 000000000..f78820fff
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -0,0 +1,1502 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._
+import Decorators._
+import StdNames.{nme, tpnme}
+import collection.mutable
+import util.{Stats, DotClass, SimpleMap}
+import config.Config
+import config.Printers.{typr, constr, subtyping, noPrinter}
+import TypeErasure.{erasedLub, erasedGlb}
+import TypeApplications._
+import scala.util.control.NonFatal
+
+/** Provides methods to compare types.
+ */
+class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
+ implicit val ctx: Context = initctx
+
+ val state = ctx.typerState
+ import state.constraint
+
+ // Set of (tp1, tp2) pairs currently being compared; allocated lazily, only
+ // once a deep recursion is detected (see `monitoredIsSubType`).
+ private var pendingSubTypes: mutable.Set[(Type, Type)] = null
+ // Current nesting depth of `isSubType` calls.
+ private var recCount = 0
+
+ // Set when a subtype check is observed in progress; triggers `state.gc()`
+ // once the outermost `isSubType` call returns (see `isSubType`).
+ private var needsGc = false
+
+ /** Is a subtype check in progress? In that case we may not
+ * permanently instantiate type variables, because the corresponding
+ * constraint might still be retracted and the instantiation should
+ * then be reversed.
+ */
+ def subtypeCheckInProgress: Boolean = {
+ val result = recCount > 0
+ if (result) {
+ constr.println("*** needsGC ***")
+ needsGc = true
+ }
+ result
+ }
+
+ /** For statistics: count how many isSubTypes are part of successful comparisons */
+ private var successCount = 0
+ private var totalCount = 0
+
+ // Frequently used class symbols and type refs, cached on first use to avoid
+ // repeated `defn` lookups during subtype checks.
+ private var myAnyClass: ClassSymbol = null
+ private var myNothingClass: ClassSymbol = null
+ private var myNullClass: ClassSymbol = null
+ private var myObjectClass: ClassSymbol = null
+ private var myAnyType: TypeRef = null
+ private var myNothingType: TypeRef = null
+
+ def AnyClass = {
+ if (myAnyClass == null) myAnyClass = defn.AnyClass
+ myAnyClass
+ }
+ def NothingClass = {
+ if (myNothingClass == null) myNothingClass = defn.NothingClass
+ myNothingClass
+ }
+ def NullClass = {
+ if (myNullClass == null) myNullClass = defn.NullClass
+ myNullClass
+ }
+ def ObjectClass = {
+ if (myObjectClass == null) myObjectClass = defn.ObjectClass
+ myObjectClass
+ }
+ def AnyType = {
+ if (myAnyType == null) myAnyType = AnyClass.typeRef
+ myAnyType
+ }
+ def NothingType = {
+ if (myNothingType == null) myNothingType = NothingClass.typeRef
+ myNothingType
+ }
+
+ /** Indicates whether a previous subtype check used GADT bounds */
+ var GADTused = false
+
+ /** Record that GADT bounds of `sym` were used in a subtype check.
+ * But exclude constructor type parameters, as these are aliased
+ * to the corresponding class parameters, which does not constitute
+ * a true usage of a GADT symbol.
+ *
+ * Always returns `true` so the call can be chained with `&&` at use sites.
+ */
+ private def GADTusage(sym: Symbol) = {
+ if (!sym.owner.isConstructor) GADTused = true
+ true
+ }
+
+ // Subtype testing `<:<`
+
+ /** Entry point for subtype tests: fast paths for NoType, identity and
+ * WildcardType, then the full `isSubType` algorithm.
+ */
+ def topLevelSubType(tp1: Type, tp2: Type): Boolean = {
+ if (tp2 eq NoType) return false
+ if ((tp2 eq tp1) || (tp2 eq WildcardType)) return true
+ try isSubType(tp1, tp2)
+ finally
+ // Optional (Config-gated) sanity check on the resulting constraint.
+ if (Config.checkConstraintsSatisfiable)
+ assert(isSatisfiable, constraint.show)
+ }
+
+ protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) {
+ if (tp2 eq NoType) false
+ else if (tp1 eq tp2) true
+ else {
+ val saved = constraint
+ val savedSuccessCount = successCount
+ try {
+ recCount = recCount + 1
+ val result =
+ if (recCount < Config.LogPendingSubTypesThreshold) firstTry(tp1, tp2)
+ else monitoredIsSubType(tp1, tp2)
+ recCount = recCount - 1
+ if (!result) constraint = saved
+ else if (recCount == 0 && needsGc) {
+ state.gc()
+ needsGc = false
+ }
+ if (Stats.monitored) recordStatistics(result, savedSuccessCount)
+ result
+ } catch {
+ case NonFatal(ex) =>
+ if (ex.isInstanceOf[AssertionError]) showGoal(tp1, tp2)
+ recCount -= 1
+ constraint = saved
+ successCount = savedSuccessCount
+ throw ex
+ }
+ }
+ }
+
+ /** Subtype check for deep recursions: records the (tp1, tp2) pair in
+ * `pendingSubTypes` and answers `false` if the same pair is already being
+ * compared, breaking infinite recursion on cyclic types.
+ */
+ private def monitoredIsSubType(tp1: Type, tp2: Type) = {
+ if (pendingSubTypes == null) {
+ pendingSubTypes = new mutable.HashSet[(Type, Type)]
+ ctx.log(s"!!! deep subtype recursion involving ${tp1.show} <:< ${tp2.show}, constraint = ${state.constraint.show}")
+ ctx.log(s"!!! constraint = ${constraint.show}")
+ //if (ctx.settings.YnoDeepSubtypes.value) {
+ // new Error("deep subtype").printStackTrace()
+ //}
+ assert(!ctx.settings.YnoDeepSubtypes.value)
+ if (Config.traceDeepSubTypeRecursions && !this.isInstanceOf[ExplainingTypeComparer])
+ ctx.log(TypeComparer.explained(implicit ctx => ctx.typeComparer.isSubType(tp1, tp2)))
+ }
+ val p = (tp1, tp2)
+ !pendingSubTypes(p) && {
+ try {
+ pendingSubTypes += p
+ firstTry(tp1, tp2)
+ } finally {
+ pendingSubTypes -= p
+ }
+ }
+ }
+
+ /** First stage of the subtype algorithm: dispatch on the form of `tp2`.
+ * Cases that do not decide here delegate to `secondTry` (which dispatches
+ * on `tp1`) or to `thirdTryNamed`.
+ */
+ private def firstTry(tp1: Type, tp2: Type): Boolean = tp2 match {
+ case tp2: NamedType =>
+ def compareNamed(tp1: Type, tp2: NamedType): Boolean = {
+ implicit val ctx: Context = this.ctx
+ tp2.info match {
+ case info2: TypeAlias => isSubType(tp1, info2.alias)
+ case _ => tp1 match {
+ case tp1: NamedType =>
+ tp1.info match {
+ case info1: TypeAlias =>
+ if (isSubType(info1.alias, tp2)) return true
+ if (tp1.prefix.isStable) return false
+ // If tp1.prefix is stable, the alias does contain all information about the original ref, so
+ // there's no need to try something else. (This is important for performance).
+ // To see why we cannot in general stop here, consider:
+ //
+ // trait C { type A }
+ // trait D { type A = String }
+ // (C & D)#A <: C#A
+ //
+ // Following the alias leads to the judgment `String <: C#A` which is false.
+ // However the original judgment should be true.
+ case _ =>
+ }
+ val sym1 =
+ if (tp1.symbol.is(ModuleClass) && tp2.symbol.is(ModuleVal))
+ // For convenience we want X$ <:< X.type
+ // This is safe because X$ self-type is X.type
+ tp1.symbol.companionModule
+ else
+ tp1.symbol
+ if ((sym1 ne NoSymbol) && (sym1 eq tp2.symbol))
+ ctx.erasedTypes ||
+ sym1.isStaticOwner ||
+ isSubType(tp1.prefix, tp2.prefix) ||
+ thirdTryNamed(tp1, tp2)
+ else
+ // Different symbols: compare name, prefix and signature directly,
+ // except for fixed-symbol refs, where symbol identity is the test.
+ ( (tp1.name eq tp2.name)
+ && isSubType(tp1.prefix, tp2.prefix)
+ && tp1.signature == tp2.signature
+ && !tp1.isInstanceOf[WithFixedSym]
+ && !tp2.isInstanceOf[WithFixedSym]
+ ) ||
+ thirdTryNamed(tp1, tp2)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ }
+ }
+ compareNamed(tp1, tp2)
+ case tp2: ProtoType =>
+ isMatchedByProto(tp2, tp1)
+ case tp2: BoundType =>
+ tp2 == tp1 || secondTry(tp1, tp2)
+ case tp2: TypeVar =>
+ isSubType(tp1, tp2.underlying)
+ case tp2: WildcardType =>
+ // An unbounded wildcard accepts anything; a bounded one compares
+ // against its upper bound.
+ def compareWild = tp2.optBounds match {
+ case TypeBounds(_, hi) => isSubType(tp1, hi)
+ case NoType => true
+ }
+ compareWild
+ case tp2: LazyRef =>
+ !tp2.evaluating && isSubType(tp1, tp2.ref)
+ case tp2: AnnotatedType =>
+ isSubType(tp1, tp2.tpe) // todo: refine?
+ case tp2: ThisType =>
+ def compareThis = {
+ val cls2 = tp2.cls
+ tp1 match {
+ case tp1: ThisType =>
+ // We treat two prefixes A.this, B.this as equivalent if
+ // A's selftype derives from B and B's selftype derives from A.
+ val cls1 = tp1.cls
+ cls1.classInfo.selfType.derivesFrom(cls2) &&
+ cls2.classInfo.selfType.derivesFrom(cls1)
+ case tp1: NamedType if cls2.is(Module) && cls2.eq(tp1.widen.typeSymbol) =>
+ cls2.isStaticOwner ||
+ isSubType(tp1.prefix, cls2.owner.thisType) ||
+ secondTry(tp1, tp2)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ }
+ compareThis
+ case tp2: SuperType =>
+ def compareSuper = tp1 match {
+ case tp1: SuperType =>
+ isSubType(tp1.thistpe, tp2.thistpe) &&
+ isSameType(tp1.supertpe, tp2.supertpe)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ compareSuper
+ case AndType(tp21, tp22) =>
+ isSubType(tp1, tp21) && isSubType(tp1, tp22)
+ case OrType(tp21, tp22) =>
+ if (tp21.stripTypeVar eq tp22.stripTypeVar) isSubType(tp1, tp21)
+ else secondTry(tp1, tp2)
+ case TypeErasure.ErasedValueType(tycon1, underlying2) =>
+ def compareErasedValueType = tp1 match {
+ case TypeErasure.ErasedValueType(tycon2, underlying1) =>
+ (tycon1.symbol eq tycon2.symbol) && isSameType(underlying1, underlying2)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ compareErasedValueType
+ case ErrorType =>
+ true
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+
+ /** Second stage of the subtype algorithm: dispatch on the form of `tp1`.
+ * Undecided cases fall through to `thirdTry`.
+ */
+ private def secondTry(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: NamedType =>
+ tp1.info match {
+ case info1: TypeAlias =>
+ if (isSubType(info1.alias, tp2)) return true
+ // See the corresponding comment in `firstTry.compareNamed`.
+ if (tp1.prefix.isStable) return false
+ case _ =>
+ }
+ thirdTry(tp1, tp2)
+ case tp1: PolyParam =>
+ def flagNothingBound = {
+ if (!frozenConstraint && tp2.isRef(defn.NothingClass) && state.isGlobalCommittable) {
+ def msg = s"!!! instantiated to Nothing: $tp1, constraint = ${constraint.show}"
+ if (Config.failOnInstantiationToNothing) assert(false, msg)
+ else ctx.log(msg)
+ }
+ true
+ }
+ def comparePolyParam =
+ ctx.mode.is(Mode.TypevarsMissContext) ||
+ isSubTypeWhenFrozen(bounds(tp1).hi, tp2) || {
+ // Try to record `tp1 <: tp2` as a new upper-bound constraint.
+ if (canConstrain(tp1)) addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound
+ else thirdTry(tp1, tp2)
+ }
+ comparePolyParam
+ case tp1: ThisType =>
+ val cls1 = tp1.cls
+ tp2 match {
+ case tp2: TermRef if cls1.is(Module) && cls1.eq(tp2.widen.typeSymbol) =>
+ cls1.isStaticOwner ||
+ isSubType(cls1.owner.thisType, tp2.prefix) ||
+ thirdTry(tp1, tp2)
+ case _ =>
+ thirdTry(tp1, tp2)
+ }
+ case tp1: SkolemType =>
+ tp2 match {
+ case tp2: SkolemType if !ctx.phase.isTyper && tp1.info <:< tp2.info => true
+ case _ => thirdTry(tp1, tp2)
+ }
+ case tp1: TypeVar =>
+ isSubType(tp1.underlying, tp2)
+ case tp1: WildcardType =>
+ def compareWild = tp1.optBounds match {
+ case TypeBounds(lo, _) => isSubType(lo, tp2)
+ case _ => true
+ }
+ compareWild
+ case tp1: LazyRef =>
+ // If `tp1` is in train of being evaluated, don't force it
+ // because that would cause an assertionError. Return false instead.
+ // See i859.scala for an example where we hit this case.
+ !tp1.evaluating && isSubType(tp1.ref, tp2)
+ case tp1: AnnotatedType =>
+ isSubType(tp1.tpe, tp2)
+ case AndType(tp11, tp12) =>
+ if (tp11.stripTypeVar eq tp12.stripTypeVar) isSubType(tp11, tp2)
+ else thirdTry(tp1, tp2)
+ case tp1 @ OrType(tp11, tp12) =>
+ def joinOK = tp2.dealias match {
+ case tp12: HKApply =>
+ // If we apply the default algorithm for `A[X] | B[Y] <: C[Z]` where `C` is a
+ // type parameter, we will instantiate `C` to `A` and then fail when comparing
+ // with `B[Y]`. To do the right thing, we need to instantiate `C` to the
+ // common superclass of `A` and `B`.
+ isSubType(tp1.join, tp2)
+ case _ =>
+ false
+ }
+ joinOK || isSubType(tp11, tp2) && isSubType(tp12, tp2)
+ case ErrorType =>
+ true
+ case _ =>
+ thirdTry(tp1, tp2)
+ }
+
+ /** Continuation of the algorithm for a named `tp2`: compare against its
+ * lower bound (possibly narrowing GADT bounds), or, for a class, against
+ * `tp1`'s base type for that class.
+ */
+ private def thirdTryNamed(tp1: Type, tp2: NamedType): Boolean = tp2.info match {
+ case TypeBounds(lo2, _) =>
+ def compareGADT: Boolean = {
+ val gbounds2 = ctx.gadt.bounds(tp2.symbol)
+ (gbounds2 != null) &&
+ (isSubTypeWhenFrozen(tp1, gbounds2.lo) ||
+ narrowGADTBounds(tp2, tp1, isUpper = false)) &&
+ GADTusage(tp2.symbol)
+ }
+ ((frozenConstraint || !isCappable(tp1)) && isSubType(tp1, lo2) ||
+ compareGADT ||
+ fourthTry(tp1, tp2))
+
+ case _ =>
+ val cls2 = tp2.symbol
+ if (cls2.isClass) {
+ // Compare against tp1's base type for cls2, if that is a proper step.
+ val base = tp1.baseTypeRef(cls2)
+ if (base.exists && (base ne tp1)) return isSubType(base, tp2)
+ // Any stable type conforms to `Singleton`.
+ if (cls2 == defn.SingletonClass && tp1.isStable) return true
+ }
+ fourthTry(tp1, tp2)
+ }
+
+ /** Third stage of the subtype algorithm: again dispatch on the form of `tp2`,
+ * now handling refined types, recursive types, higher-kinded applications and
+ * lambdas, method/expr types, type bounds and class infos. Undecided cases
+ * fall through to `fourthTry`.
+ */
+ private def thirdTry(tp1: Type, tp2: Type): Boolean = tp2 match {
+ case tp2: NamedType =>
+ thirdTryNamed(tp1, tp2)
+ case tp2: PolyParam =>
+ def comparePolyParam =
+ (ctx.mode is Mode.TypevarsMissContext) ||
+ isSubTypeWhenFrozen(tp1, bounds(tp2).lo) || {
+ // Try to record `tp1 <: tp2` as a new lower-bound constraint on tp2.
+ if (canConstrain(tp2)) addConstraint(tp2, tp1.widenExpr, fromBelow = true)
+ else fourthTry(tp1, tp2)
+ }
+ comparePolyParam
+ case tp2: RefinedType =>
+ def compareRefinedSlow: Boolean = {
+ val name2 = tp2.refinedName
+ isSubType(tp1, tp2.parent) &&
+ (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2))
+ }
+ def compareRefined: Boolean = {
+ val tp1w = tp1.widen
+ val skipped2 = skipMatching(tp1w, tp2)
+ if ((skipped2 eq tp2) || !Config.fastPathForRefinedSubtype)
+ tp1 match {
+ case tp1: AndType =>
+ // Delay calling `compareRefinedSlow` because looking up a member
+ // of an `AndType` can lead to a cascade of subtyping checks
+ // This twist is needed to make collection/generic/ParFactory.scala compile
+ fourthTry(tp1, tp2) || compareRefinedSlow
+ case _ =>
+ compareRefinedSlow || fourthTry(tp1, tp2)
+ }
+ else // fast path, in particular for refinements resulting from parameterization.
+ isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) &&
+ isSubType(tp1, skipped2)
+ }
+ compareRefined
+ case tp2: RecType =>
+ def compareRec = tp1.safeDealias match {
+ case tp1: RecType =>
+ // Compare parents with tp2's RecThis rebound to tp1's.
+ val rthis1 = RecThis(tp1)
+ isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1))
+ case _ =>
+ val tp1stable = ensureStableSingleton(tp1)
+ isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable))
+ }
+ compareRec
+ case tp2 @ HKApply(tycon2, args2) =>
+ compareHkApply2(tp1, tp2, tycon2, args2)
+ case tp2 @ PolyType(tparams2, body2) =>
+ def compareHkLambda: Boolean = tp1.stripTypeVar match {
+ case tp1 @ PolyType(tparams1, body1) =>
+ /* Don't compare bounds of lambdas under language:Scala2, or t2994 will fail
+ * The issue is that, logically, bounds should compare contravariantly,
+ * but that would invalidate a pattern exploited in t2994:
+ *
+ * [X0 <: Number] -> Number <:< [X0] -> Any
+ *
+ * Under the new scheme, `[X0] -> Any` is NOT a kind that subsumes
+ * all other bounds. You'd have to write `[X0 >: Any <: Nothing] -> Any` instead.
+ * This might look weird, but is the only logically correct way to do it.
+ *
+ * Note: it would be nice if this could trigger a migration warning, but I
+ * am not sure how, since the code is buried so deep in subtyping logic.
+ */
+ def boundsOK =
+ ctx.scala2Mode ||
+ tparams1.corresponds(tparams2)((tparam1, tparam2) =>
+ isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds))
+ val saved = comparedPolyTypes
+ comparedPolyTypes += tp1
+ comparedPolyTypes += tp2
+ try
+ variancesConform(tparams1, tparams2) &&
+ boundsOK &&
+ isSubType(body1, body2.subst(tp2, tp1))
+ finally comparedPolyTypes = saved
+ case _ =>
+ // A non-HK tp1 may still conform to an eta-expanded class reference.
+ if (!tp1.isHK) {
+ tp2 match {
+ case EtaExpansion(tycon2) if tycon2.symbol.isClass =>
+ return isSubType(tp1, tycon2)
+ case _ =>
+ }
+ }
+ fourthTry(tp1, tp2)
+ }
+ compareHkLambda
+ case OrType(tp21, tp22) =>
+ // Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22)
+ // and analogously for T1 <: T21 | (T221 & T222)
+ // `|' types to the right of <: are problematic, because
+ // we have to choose one constraint set or another, which might cut off
+ // solutions. The rewriting delays the point where we have to choose.
+ tp21 match {
+ case AndType(tp211, tp212) =>
+ return isSubType(tp1, OrType(tp211, tp22)) && isSubType(tp1, OrType(tp212, tp22))
+ case _ =>
+ }
+ tp22 match {
+ case AndType(tp221, tp222) =>
+ return isSubType(tp1, OrType(tp21, tp221)) && isSubType(tp1, OrType(tp21, tp222))
+ case _ =>
+ }
+ either(isSubType(tp1, tp21), isSubType(tp1, tp22)) || fourthTry(tp1, tp2)
+ case tp2 @ MethodType(_, formals2) =>
+ def compareMethod = tp1 match {
+ case tp1 @ MethodType(_, formals1) =>
+ (tp1.signature consistentParams tp2.signature) &&
+ matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit && // needed?
+ isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ false
+ }
+ compareMethod
+ case tp2 @ ExprType(restpe2) =>
+ def compareExpr = tp1 match {
+ // We allow ()T to be a subtype of => T.
+ // We need some subtype relationship between them so that e.g.
+ // def toString and def toString() don't clash when seen
+ // as members of the same type. And it seems most logical to take
+ // ()T <:< => T, since everything one can do with a => T one can
+ // also do with a ()T by automatic () insertion.
+ case tp1 @ MethodType(Nil, _) => isSubType(tp1.resultType, restpe2)
+ case _ => isSubType(tp1.widenExpr, restpe2)
+ }
+ compareExpr
+ case tp2 @ TypeBounds(lo2, hi2) =>
+ def compareTypeBounds = tp1 match {
+ case tp1 @ TypeBounds(lo1, hi1) =>
+ // Bounds compare contravariantly below, covariantly above, with
+ // variance flags and the Nothing/Any extremes as short-circuits.
+ (tp2.variance > 0 && tp1.variance >= 0 || (lo2 eq NothingType) || isSubType(lo2, lo1)) &&
+ (tp2.variance < 0 && tp1.variance <= 0 || (hi2 eq AnyType) || isSubType(hi1, hi2))
+ case tp1: ClassInfo =>
+ tp2 contains tp1
+ case _ =>
+ false
+ }
+ compareTypeBounds
+ case ClassInfo(pre2, cls2, _, _, _) =>
+ def compareClassInfo = tp1 match {
+ case ClassInfo(pre1, cls1, _, _, _) =>
+ (cls1 eq cls2) && isSubType(pre1, pre2)
+ case _ =>
+ false
+ }
+ compareClassInfo
+ case _ =>
+ fourthTry(tp1, tp2)
+ }
+
+ /** The fourth and final stage of subtype checking: decompose the left-hand
+ * type `tp1` after the decompositions of the right-hand side have been tried.
+ * Returns `false` if no rule applies.
+ */
+ private def fourthTry(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: TypeRef =>
+ tp1.info match {
+ case TypeBounds(_, hi1) =>
+ // Try the GADT bounds of tp1's symbol, possibly narrowing them from above.
+ def compareGADT = {
+ val gbounds1 = ctx.gadt.bounds(tp1.symbol)
+ (gbounds1 != null) &&
+ (isSubTypeWhenFrozen(gbounds1.hi, tp2) ||
+ narrowGADTBounds(tp1, tp2, isUpper = true)) &&
+ GADTusage(tp1.symbol)
+ }
+ isSubType(hi1, tp2) || compareGADT
+ case _ =>
+ // `Null <: tp` holds only for reference-like (nullable) types;
+ // decompose and/or/refined types to decide nullability.
+ def isNullable(tp: Type): Boolean = tp.widenDealias match {
+ case tp: TypeRef => tp.symbol.isNullableClass
+ case tp: RefinedOrRecType => isNullable(tp.parent)
+ case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2)
+ case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2)
+ case _ => false
+ }
+ (tp1.symbol eq NothingClass) && tp2.isValueTypeOrLambda ||
+ (tp1.symbol eq NullClass) && isNullable(tp2)
+ }
+ case tp1: SingletonType =>
+ /** if `tp2 == p.type` and `p: q.type` then try `tp1 <:< q.type` as a last effort.*/
+ def comparePaths = tp2 match {
+ case tp2: TermRef =>
+ tp2.info.widenExpr match {
+ case tp2i: SingletonType =>
+ isSubType(tp1, tp2i) // see z1720.scala for a case where this can arise even in typer.
+ case _ => false
+ }
+ case _ =>
+ false
+ }
+ isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths
+ case tp1: RefinedType =>
+ isNewSubType(tp1.parent, tp2)
+ case tp1: RecType =>
+ isNewSubType(tp1.parent, tp2)
+ case tp1 @ HKApply(tycon1, args1) =>
+ compareHkApply1(tp1, tycon1, args1, tp2)
+ case EtaExpansion(tycon1) =>
+ isSubType(tycon1, tp2)
+ case AndType(tp11, tp12) =>
+ // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2
+ // and analogously for T11 & (T121 | T122) & T12 <: T2
+ // `&' types to the left of <: are problematic, because
+ // we have to choose one constraint set or another, which might cut off
+ // solutions. The rewriting delays the point where we have to choose.
+ tp11 match {
+ case OrType(tp111, tp112) =>
+ return isSubType(AndType(tp111, tp12), tp2) && isSubType(AndType(tp112, tp12), tp2)
+ case _ =>
+ }
+ tp12 match {
+ case OrType(tp121, tp122) =>
+ return isSubType(AndType(tp11, tp121), tp2) && isSubType(AndType(tp11, tp122), tp2)
+ case _ =>
+ }
+ either(isSubType(tp11, tp2), isSubType(tp12, tp2))
+ case JavaArrayType(elem1) =>
+ def compareJavaArray = tp2 match {
+ case JavaArrayType(elem2) => isSubType(elem1, elem2)
+ case _ => tp2 isRef ObjectClass
+ }
+ compareJavaArray
+ case tp1: ExprType if ctx.phase.id > ctx.gettersPhase.id =>
+ // getters might have converted T to => T, need to compensate.
+ isSubType(tp1.widenExpr, tp2)
+ case _ =>
+ false
+ }
+
+ /** Subtype test for the hk application `tp2 = tycon2[args2]`.
+ */
+ def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ val tparams = tycon2.typeParams
+ if (tparams.isEmpty) return false // can happen for ill-typed programs, e.g. neg/tcpoly_overloaded.scala
+
+ /** True if `tp1` and `tp2` have compatible type constructors and their
+ * corresponding arguments are subtypes relative to their variance (see `isSubArgs`).
+ */
+ def isMatchingApply(tp1: Type): Boolean = tp1 match {
+ case HKApply(tycon1, args1) =>
+ tycon1.dealias match {
+ case tycon1: PolyParam =>
+ // Constructors match if identical, or if the lhs constructor parameter
+ // can be instantiated to the rhs constructor.
+ (tycon1 == tycon2 ||
+ canConstrain(tycon1) && tryInstantiate(tycon1, tycon2)) &&
+ isSubArgs(args1, args2, tparams)
+ case tycon1: TypeRef =>
+ tycon2.dealias match {
+ case tycon2: TypeRef if tycon1.symbol == tycon2.symbol =>
+ isSubType(tycon1.prefix, tycon2.prefix) &&
+ isSubArgs(args1, args2, tparams)
+ case _ =>
+ false
+ }
+ case tycon1: TypeVar =>
+ isMatchingApply(tycon1.underlying)
+ case tycon1: AnnotatedType =>
+ isMatchingApply(tycon1.underlying)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ /** `param2` can be instantiated to a type application prefix of the LHS
+ * or to a type application prefix of one of the LHS base class instances
+ * and the resulting type application is a supertype of `tp1`,
+ * or fallback to fourthTry.
+ */
+ def canInstantiate(tycon2: PolyParam): Boolean = {
+
+ /** Let
+ *
+ * `tparams_1, ..., tparams_k-1` be the type parameters of the rhs
+ * `tparams1_1, ..., tparams1_n-1` be the type parameters of the constructor of the lhs
+ * `args1_1, ..., args1_n-1` be the type arguments of the lhs
+ * `d = n - k`
+ *
+ * Returns `true` iff `d >= 0` and `tycon2` can be instantiated to
+ *
+ * [tparams1_d, ... tparams1_n-1] -> tycon1a[args_1, ..., args_d-1, tparams_d, ... tparams_n-1]
+ *
+ * such that the resulting type application is a supertype of `tp1`.
+ */
+ def tyconOK(tycon1a: Type, args1: List[Type]) = {
+ var tycon1b = tycon1a
+ val tparams1a = tycon1a.typeParams
+ val lengthDiff = tparams1a.length - tparams.length
+ lengthDiff >= 0 && {
+ val tparams1 = tparams1a.drop(lengthDiff)
+ variancesConform(tparams1, tparams) && {
+ // If the lhs constructor takes more parameters, curry it into a
+ // PolyType over the surplus parameters before instantiating.
+ if (lengthDiff > 0)
+ tycon1b = PolyType(tparams1.map(_.paramName), tparams1.map(_.paramVariance))(
+ tl => tparams1.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tycon1a.appliedTo(args1.take(lengthDiff) ++
+ tparams1.indices.toList.map(PolyParam(tl, _))))
+ (ctx.mode.is(Mode.TypevarsMissContext) ||
+ tryInstantiate(tycon2, tycon1b.ensureHK)) &&
+ isSubType(tp1, tycon1b.appliedTo(args2))
+ }
+ }
+ }
+
+ tp1.widen match {
+ case tp1w @ HKApply(tycon1, args1) =>
+ tyconOK(tycon1, args1)
+ case tp1w =>
+ // lhs is not an application: try the base-type instances of its class.
+ tp1w.typeSymbol.isClass && {
+ val classBounds = tycon2.classSymbols
+ def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 =>
+ classBounds.exists(bc.derivesFrom) &&
+ tyconOK(tp1w.baseTypeRef(bc), tp1w.baseArgInfos(bc)) ||
+ liftToBase(bcs1)
+ case _ =>
+ false
+ }
+ liftToBase(tp1w.baseClasses)
+ } ||
+ fourthTry(tp1, tp2)
+ }
+ }
+
+ /** Fall back to comparing either with `fourthTry` or against the lower
+ * approximation of the rhs.
+ * @param tyconLo The type constructor's lower approximation.
+ */
+ def fallback(tyconLo: Type) =
+ either(fourthTry(tp1, tp2), isSubType(tp1, tyconLo.applyIfParameterized(args2)))
+
+ /** Let `tycon2bounds` be the bounds of the RHS type constructor `tycon2`.
+ * Let `app2 = tp2` where the type constructor of `tp2` is replaced by
+ * `tycon2bounds.lo`.
+ * If both bounds are the same, continue with `tp1 <:< app2`.
+ * otherwise continue with either
+ *
+ * tp1 <:< tp2 using fourthTry (this might instantiate params in tp1)
+ * tp1 <:< app2 using isSubType (this might instantiate params in tp2)
+ */
+ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean =
+ if (tycon2bounds.lo eq tycon2bounds.hi)
+ isSubType(tp1,
+ if (tyconIsTypeRef) tp2.superType
+ else tycon2bounds.lo.applyIfParameterized(args2))
+ else
+ fallback(tycon2bounds.lo)
+
+ tycon2 match {
+ case param2: PolyParam =>
+ isMatchingApply(tp1) || {
+ if (canConstrain(param2)) canInstantiate(param2)
+ else compareLower(bounds(param2), tyconIsTypeRef = false)
+ }
+ case tycon2: TypeRef =>
+ isMatchingApply(tp1) ||
+ compareLower(tycon2.info.bounds, tyconIsTypeRef = true)
+ case _: TypeVar | _: AnnotatedType =>
+ // Strip the wrapper and retry against the underlying application.
+ isSubType(tp1, tp2.superType)
+ case tycon2: HKApply =>
+ fallback(tycon2.lowerBound)
+ case _ =>
+ false
+ }
+ }
+
+ /** Subtype test for the hk application `tp1 = tycon1[args1]`.
+ */
+ def compareHkApply1(tp1: HKApply, tycon1: Type, args1: List[Type], tp2: Type): Boolean =
+ tycon1 match {
+ case param1: PolyParam =>
+ // Either instantiate the lhs constructor parameter to the rhs
+ // constructor, or approximate it from above by its upper bound.
+ def canInstantiate = tp2 match {
+ case AppliedType(tycon2, args2) =>
+ tryInstantiate(param1, tycon2.ensureHK) && isSubArgs(args1, args2, tycon2.typeParams)
+ case _ =>
+ false
+ }
+ canConstrain(param1) && canInstantiate ||
+ isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2)
+ case tycon1: TypeProxy =>
+ isSubType(tp1.superType, tp2)
+ case _ =>
+ false
+ }
+
+ /** Subtype test for corresponding arguments in `args1`, `args2` according to
+ * variances in type parameters `tparams`.
+ * Covariant (v > 0) params need args1 <: args2, contravariant (v < 0) the
+ * reverse, invariant params need both directions.
+ */
+ def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo]): Boolean =
+ if (args1.isEmpty) args2.isEmpty
+ else args2.nonEmpty && {
+ val v = tparams.head.paramVariance
+ (v > 0 || isSubType(args2.head, args1.head)) &&
+ (v < 0 || isSubType(args1.head, args2.head))
+ } && isSubArgs(args1.tail, args2.tail, tparams)
+
+ /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where
+ * - `B` derives from one of the class symbols of `tp2`,
+ * - the type parameters of `B` match one-by-one the variances of `tparams`,
+ * - `B` satisfies predicate `p`.
+ */
+ private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = {
+ val classBounds = tp2.classSymbols
+ // Walk tp1's base classes in order, testing each candidate base type.
+ def recur(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 =>
+ val baseRef = tp1.baseTypeRef(bc)
+ (classBounds.exists(bc.derivesFrom) &&
+ variancesConform(baseRef.typeParams, tparams) &&
+ p(baseRef.appliedTo(tp1.baseArgInfos(bc)))
+ ||
+ recur(bcs1))
+ case nil =>
+ false
+ }
+ recur(tp1.baseClasses)
+ }
+
+ /** Replace any top-level recursive type `{ z => T }` in `tp` with
+ * `[z := anchor]T`.
+ */
+ private def fixRecs(anchor: SingletonType, tp: Type): Type = {
+ def fix(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType => fix(tp.parent).substRecThis(tp, anchor)
+ case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo)
+ case tp: PolyParam => fixOrElse(bounds(tp).hi, tp)
+ case tp: TypeProxy => fixOrElse(tp.underlying, tp)
+ case tp: AndOrType => tp.derivedAndOrType(fix(tp.tp1), fix(tp.tp2))
+ case tp => tp
+ }
+ // Keep `fallback` unless fixing actually produced a different type.
+ def fixOrElse(tp: Type, fallback: Type) = {
+ val tp1 = fix(tp)
+ if (tp1 ne tp) tp1 else fallback
+ }
+ fix(tp)
+ }
+
+ /** Returns true iff the result of evaluating either `op1` or `op2` is true,
+ * trying at the same time to keep the constraint as wide as possible.
+ * E.g, if
+ *
+ * tp11 <:< tp12 = true with post-constraint c1
+ * tp12 <:< tp22 = true with post-constraint c2
+ *
+ * and c1 subsumes c2, then c2 is kept as the post-constraint of the result,
+ * otherwise c1 is kept.
+ *
+ * This method is used to approximate a solution in one of the following cases
+ *
+ * T1 & T2 <:< T3
+ * T1 <:< T2 | T3
+ *
+ * In the first case (the second one is analogous), we have a choice whether we
+ * want to establish the subtyping judgement using
+ *
+ * T1 <:< T3 or T2 <:< T3
+ *
+ * as a precondition. Either precondition might constrain type variables.
+ * The purpose of this method is to pick the precondition that constrains less.
+ * The method is not complete, because sometimes there is no best solution. Example:
+ *
+ * A? & B? <: T
+ *
+ * Here, each precondition leads to a different constraint, and neither of
+ * the two post-constraints subsumes the other.
+ *
+ * NOTE: mutates the `constraint` field; the saved `preConstraint` lets
+ * `op2` re-run from the same starting state as `op1`.
+ */
+ private def either(op1: => Boolean, op2: => Boolean): Boolean = {
+ val preConstraint = constraint
+ op1 && {
+ val leftConstraint = constraint
+ constraint = preConstraint
+ // op1 succeeded: try op2 from the same start and keep the weaker
+ // (more general) post-constraint of the two.
+ if (!(op2 && subsumes(leftConstraint, constraint, preConstraint))) {
+ if (constr != noPrinter && !subsumes(constraint, leftConstraint, preConstraint))
+ constr.println(i"CUT - prefer $leftConstraint over $constraint")
+ constraint = leftConstraint
+ }
+ true
+ } || op2
+ }
+
+ /** Like tp1 <:< tp2, but returns false immediately if we know that
+ * the case was covered previously during subtyping.
+ * Used to avoid infinite or redundant recursion when peeling off
+ * refinements/parents in `fourthTry`.
+ */
+ private def isNewSubType(tp1: Type, tp2: Type): Boolean =
+ if (isCovered(tp1) && isCovered(tp2)) {
+ //println(s"useless subtype: $tp1 <:< $tp2")
+ false
+ } else isSubType(tp1, tp2)
+
+ /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of
+ * the normalized type of the refinement `tp2`?
+ * Normalization is as follows: If `tp2` contains a skolem to its refinement type,
+ * rebase both itself and the member info of `tp` on a freshly created skolem type.
+ */
+ protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = {
+ val rinfo2 = tp2.refinedInfo
+ val mbr = tp1.member(name)
+
+ def qualifies(m: SingleDenotation) = isSubType(m.info, rinfo2)
+
+ def memberMatches: Boolean = mbr match { // inlined hasAltWith for performance
+ case mbr: SingleDenotation => qualifies(mbr)
+ case _ => mbr hasAltWith qualifies
+ }
+
+ // special case for situations like:
+ // class C { type T }
+ // val foo: C
+ // foo.type <: C { type T {= , <: , >:} foo.T }
+ def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && {
+ rinfo2 match {
+ case rinfo2: TypeBounds =>
+ val mbr1 = tp1.select(name)
+ // Guard against bottom types, for which any selection would match.
+ !defn.isBottomType(tp1.widen) &&
+ (mbr1 =:= rinfo2.hi || (rinfo2.hi ne rinfo2.lo) && mbr1 =:= rinfo2.lo)
+ case _ => false
+ }
+ }
+
+ /*>|>*/ ctx.traceIndented(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ {
+ memberMatches || selfReferentialMatch
+ }
+ }
+
+ /** Map `tp` to a stable singleton type: keep stable singletons, wrap other
+ * value types in a fresh SkolemType, and recurse through proxies.
+ * NOTE(review): the match is not exhaustive over all `Type`s — presumably
+ * callers only pass value or proxy types; confirm before reuse.
+ */
+ final def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match {
+ case tp: SingletonType if tp.isStable => tp
+ case tp: ValueType => SkolemType(tp)
+ case tp: TypeProxy => ensureStableSingleton(tp.underlying)
+ }
+
+ /** Skip refinements in `tp2` which match corresponding refinements in `tp1`.
+ * "Match" means:
+ * - they appear in the same order,
+ * - they refine the same names,
+ * - the refinement in `tp1` is an alias type, and
+ * - neither refinement refers back to the refined type via a refined this.
+ * @return The parent type of `tp2` after skipping the matching refinements.
+ */
+ private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match {
+ case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName =>
+ tp2.parent match {
+ case parent2: RefinedType => skipMatching(parent1, parent2)
+ case parent2 => parent2
+ }
+ // No match at this level: return tp2 unchanged.
+ case _ => tp2
+ }
+
+ /** Are refinements in `tp1` pairwise subtypes of the refinements of `tp2`
+ * up to parent type `limit`?
+ * @pre `tp1` has the necessary number of refinements, they are type aliases,
+ * and their names match the corresponding refinements in `tp2`.
+ * Further, no refinement refers back to the refined type via a refined this.
+ * The precondition is established by `skipMatching`.
+ */
+ private def isSubRefinements(tp1: RefinedType, tp2: RefinedType, limit: Type): Boolean = {
+ def hasSubRefinement(tp1: RefinedType, refine2: Type): Boolean = {
+ isSubType(tp1.refinedInfo, refine2) || {
+ // last effort: try to adapt variances of higher-kinded types if this is sound.
+ val adapted2 = refine2.adaptHkVariances(tp1.parent.member(tp1.refinedName).symbol.info)
+ adapted2.ne(refine2) && hasSubRefinement(tp1, adapted2)
+ }
+ }
+ // The casts are justified by the precondition established by skipMatching.
+ hasSubRefinement(tp1, tp2.refinedInfo) && (
+ (tp2.parent eq limit) ||
+ isSubRefinements(
+ tp1.parent.asInstanceOf[RefinedType], tp2.parent.asInstanceOf[RefinedType], limit))
+ }
+
+ /** A type has been covered previously in subtype checking if it
+ * is some combination of TypeRefs that point to classes, where the
+ * combiners are RefinedTypes, RecTypes, AndTypes or AnnotatedTypes.
+ * One exception: Refinements referring to basetype args are never considered
+ * to be already covered. This is necessary because such refined types might
+ * still need to be compared with a compareAliasRefined.
+ */
+ private def isCovered(tp: Type): Boolean = tp.dealias.stripTypeVar match {
+ // Nothing/Null are excluded since they have special subtyping rules.
+ case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass
+ case tp: ProtoType => false
+ case tp: RefinedOrRecType => isCovered(tp.parent)
+ case tp: AnnotatedType => isCovered(tp.underlying)
+ case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2)
+ case _ => false
+ }
+
+ /** Defer constraining type variables when compared against prototypes.
+ * A constrainable PolyParam always "matches" a prototype here, so the
+ * real constraint is added later rather than eagerly.
+ */
+ def isMatchedByProto(proto: ProtoType, tp: Type) = tp.stripTypeVar match {
+ case tp: PolyParam if constraint contains tp => true
+ case _ => proto.isMatchedBy(tp)
+ }
+
+ /** Can type `tp` be constrained from above by adding a constraint to
+ * a typevar that it refers to? In that case we have to be careful not
+ * to approximate with the lower bound of a type in `thirdTry`. Instead,
+ * we should first unroll `tp1` until we hit the type variable and bind the
+ * type variable with (the corresponding type in) `tp2` instead.
+ */
+ private def isCappable(tp: Type): Boolean = tp match {
+ case tp: PolyParam => constraint contains tp
+ case tp: TypeProxy => isCappable(tp.underlying)
+ // An and/or type is cappable if either branch is.
+ case tp: AndOrType => isCappable(tp.tp1) || isCappable(tp.tp2)
+ case _ => false
+ }
+
+ /** Narrow gadt.bounds for the type parameter referenced by `tr` to include
+ * `bound` as an upper or lower bound (which depends on `isUpper`).
+ * Test that the resulting bounds are still satisfiable.
+ * Only applies in GADT mode and when the constraint is not frozen.
+ */
+ private def narrowGADTBounds(tr: NamedType, bound: Type, isUpper: Boolean): Boolean =
+ ctx.mode.is(Mode.GADTflexible) && !frozenConstraint && {
+ val tparam = tr.symbol
+ typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
+ // A self-referential bound would be vacuous; reject it.
+ if (bound.isRef(tparam)) false
+ else bound match {
+ case bound: TypeRef
+ if bound.symbol.is(BindDefinedType) &&
+ ctx.gadt.bounds.contains(bound.symbol) &&
+ !tr.symbol.is(BindDefinedType) =>
+ // Avoid having pattern-bound types in gadt bounds,
+ // as these might be eliminated once the pattern is typechecked.
+ // Pattern-bound type symbols should be narrowed first, only if that fails
+ // should symbols in the environment be constrained.
+ narrowGADTBounds(bound, tr, !isUpper)
+ case _ =>
+ val oldBounds = ctx.gadt.bounds(tparam)
+ val newBounds =
+ if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
+ else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
+ // Commit the narrowed bounds only if they remain satisfiable.
+ isSubType(newBounds.lo, newBounds.hi) &&
+ { ctx.gadt.setBounds(tparam, newBounds); true }
+ }
+ }
+
+ // Tests around `matches`
+
+ /** A function implementing `tp1` matches `tp2`.
+ * @param relaxed if true, a parameterless method type is also allowed to
+ * match its underlying result type (and vice versa), and
+ * non-method types match without requiring `=:=`.
+ */
+ final def matchesType(tp1: Type, tp2: Type, relaxed: Boolean): Boolean = tp1.widen match {
+ case tp1: MethodType =>
+ tp2.widen match {
+ case tp2: MethodType =>
+ tp1.isImplicit == tp2.isImplicit &&
+ matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isJava, tp2.isJava) &&
+ matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed)
+ case tp2 =>
+ relaxed && tp1.paramNames.isEmpty &&
+ matchesType(tp1.resultType, tp2, relaxed)
+ }
+ case tp1: PolyType =>
+ tp2.widen match {
+ case tp2: PolyType =>
+ // Only arity needs to match; bounds are checked elsewhere.
+ sameLength(tp1.paramNames, tp2.paramNames) &&
+ matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed)
+ case _ =>
+ false
+ }
+ case _ =>
+ tp2.widen match {
+ case _: PolyType =>
+ false
+ case tp2: MethodType =>
+ relaxed && tp2.paramNames.isEmpty &&
+ matchesType(tp1, tp2.resultType, relaxed)
+ case tp2 =>
+ relaxed || isSameType(tp1, tp2)
+ }
+ }
+
+ /** Are `formals1` and `formals2` parameter lists with pairwise equivalent types?
+ * Java `Object` and Scala `Any` are treated as equivalent when either side
+ * is a Java method.
+ */
+ def matchingParams(formals1: List[Type], formals2: List[Type], isJava1: Boolean, isJava2: Boolean): Boolean = formals1 match {
+ case formal1 :: rest1 =>
+ formals2 match {
+ case formal2 :: rest2 =>
+ (isSameTypeWhenFrozen(formal1, formal2)
+ || isJava1 && (formal2 isRef ObjectClass) && (formal1 isRef AnyClass)
+ || isJava2 && (formal1 isRef ObjectClass) && (formal2 isRef AnyClass)) &&
+ matchingParams(rest1, rest2, isJava1, isJava2)
+ case nil =>
+ false
+ }
+ case nil =>
+ formals2.isEmpty
+ }
+
+ /** Do generic types `poly1` and `poly2` have type parameters that
+ * have the same bounds (after renaming one set to the other)?
+ * Renaming is performed by substituting `poly2`'s params into `poly1`.
+ */
+ def matchingTypeParams(poly1: PolyType, poly2: PolyType): Boolean =
+ (poly1.paramBounds corresponds poly2.paramBounds)((b1, b2) =>
+ isSameType(b1, b2.subst(poly2, poly1)))
+
+ // Type equality =:=
+
+ /** Two types are the same if are mutual subtypes of each other.
+ * NoType is never the same as anything, including itself.
+ */
+ def isSameType(tp1: Type, tp2: Type): Boolean =
+ if (tp1 eq NoType) false
+ else if (tp1 eq tp2) true
+ else isSubType(tp1, tp2) && isSubType(tp2, tp1)
+
+ /** Same as `isSameType` but also can be applied to overloaded TermRefs, where
+ * two overloaded refs are the same if they have pairwise equal alternatives.
+ * The relation is established by checking mutual "sub-refs": every
+ * alternative on one side must be matched by some alternative on the other.
+ */
+ def isSameRef(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSameRef($tp1, $tp2)") {
+ def isSubRef(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: TermRef if tp1.isOverloaded =>
+ tp1.alternatives forall (isSubRef(_, tp2))
+ case _ =>
+ tp2 match {
+ case tp2: TermRef if tp2.isOverloaded =>
+ tp2.alternatives exists (isSubRef(tp1, _))
+ case _ =>
+ // Neither side overloaded: fall back to plain subtyping.
+ isSubType(tp1, tp2)
+ }
+ }
+ isSubRef(tp1, tp2) && isSubRef(tp2, tp1)
+ }
+
+ /** The greatest lower bound of two types */
+ def glb(tp1: Type, tp2: Type): Type = /*>|>*/ ctx.traceIndented(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ {
+ if (tp1 eq tp2) tp1
+ else if (!tp1.exists) tp2
+ else if (!tp2.exists) tp1
+ // Any/Nothing act as neutral/absorbing elements of glb.
+ else if ((tp1 isRef AnyClass) || (tp2 isRef NothingClass)) tp2
+ else if ((tp2 isRef AnyClass) || (tp1 isRef NothingClass)) tp1
+ else tp2 match { // normalize to disjunctive normal form if possible.
+ case OrType(tp21, tp22) =>
+ tp1 & tp21 | tp1 & tp22
+ case _ =>
+ tp1 match {
+ case OrType(tp11, tp12) =>
+ tp11 & tp2 | tp12 & tp2
+ case _ =>
+ // If one side already subsumes the other, merge instead of
+ // building a fresh AndType.
+ val t1 = mergeIfSub(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = mergeIfSub(tp2, tp1)
+ if (t2.exists) t2
+ else tp1 match {
+ case tp1: ConstantType =>
+ tp2 match {
+ case tp2: ConstantType =>
+ // Make use of the fact that the intersection of two constant types
+ // types which are not subtypes of each other is known to be empty.
+ // Note: The same does not apply to singleton types in general.
+ // E.g. we could have a pattern match against `x.type & y.type`
+ // which might succeed if `x` and `y` happen to be the same ref
+ // at run time. It would not work to replace that with `Nothing`.
+ // However, maybe we can still apply the replacement to
+ // types which are not explicitly written.
+ defn.NothingType
+ case _ => andType(tp1, tp2)
+ }
+ case _ => andType(tp1, tp2)
+ }
+ }
+ }
+ }
+ }
+
+ /** The greatest lower bound of a list of types, folding from `Any`. */
+ final def glb(tps: List[Type]): Type =
+ ((defn.AnyType: Type) /: tps)(glb)
+
+ /** The least upper bound of two types
+ * @note We do not admit singleton types in or-types as lubs.
+ */
+ def lub(tp1: Type, tp2: Type): Type = /*>|>*/ ctx.traceIndented(s"lub(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ {
+ if (tp1 eq tp2) tp1
+ else if (!tp1.exists) tp1
+ else if (!tp2.exists) tp2
+ // Any/Nothing act as absorbing/neutral elements of lub.
+ else if ((tp1 isRef AnyClass) || (tp2 isRef NothingClass)) tp1
+ else if ((tp2 isRef AnyClass) || (tp1 isRef NothingClass)) tp2
+ else {
+ val t1 = mergeIfSuper(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = mergeIfSuper(tp2, tp1)
+ if (t2.exists) t2
+ else {
+ // Widen singletons before forming an or-type (see @note above).
+ val tp1w = tp1.widen
+ val tp2w = tp2.widen
+ if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w)
+ else orType(tp1w, tp2w) // no need to check subtypes again
+ }
+ }
+ }
+ }
+
+ /** The least upper bound of a list of types, folding from `Nothing`. */
+ final def lub(tps: List[Type]): Type =
+ ((defn.NothingType: Type) /: tps)(lub)
+
+ /** Merge `t1` into `tp2` if t1 is a subtype of some &-summand of tp2.
+ * Returns NoType if no merge is possible.
+ */
+ private def mergeIfSub(tp1: Type, tp2: Type): Type =
+ if (isSubTypeWhenFrozen(tp1, tp2))
+ if (isSubTypeWhenFrozen(tp2, tp1)) tp2 else tp1 // keep existing type if possible
+ else tp2 match {
+ case tp2 @ AndType(tp21, tp22) =>
+ // Try to merge into the left summand first, then the right.
+ val lower1 = mergeIfSub(tp1, tp21)
+ if (lower1 eq tp21) tp2
+ else if (lower1.exists) lower1 & tp22
+ else {
+ val lower2 = mergeIfSub(tp1, tp22)
+ if (lower2 eq tp22) tp2
+ else if (lower2.exists) tp21 & lower2
+ else NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ /** Merge `tp1` into `tp2` if tp1 is a supertype of some |-summand of tp2.
+ * Returns NoType if no merge is possible. Dual of `mergeIfSub`.
+ */
+ private def mergeIfSuper(tp1: Type, tp2: Type): Type =
+ if (isSubTypeWhenFrozen(tp2, tp1))
+ if (isSubTypeWhenFrozen(tp1, tp2)) tp2 else tp1 // keep existing type if possible
+ else tp2 match {
+ case tp2 @ OrType(tp21, tp22) =>
+ // Try to merge into the left summand first, then the right.
+ val higher1 = mergeIfSuper(tp1, tp21)
+ if (higher1 eq tp21) tp2
+ else if (higher1.exists) higher1 | tp22
+ else {
+ val higher2 = mergeIfSuper(tp1, tp22)
+ if (higher2 eq tp22) tp2
+ else if (higher2.exists) tp21 | higher2
+ else NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ /** Form a normalized conjunction of two types.
+ * Note: For certain types, `&` is distributed inside the type. This holds for
+ * all types which are not value types (e.g. TypeBounds, ClassInfo,
+ * ExprType, MethodType, PolyType). Also, when forming an `&`,
+ * instantiated TypeVars are dereferenced and annotations are stripped.
+ * Finally, refined types with the same refined name are
+ * opportunistically merged.
+ *
+ * Sometimes, the conjunction of two types cannot be formed because
+ * the types are in conflict of each other. In particular:
+ *
+ * 1. Two different class types are conflicting.
+ * 2. A class type conflicts with a type bounds that does not include the class reference.
+ * 3. Two method or poly types with different (type) parameters but the same
+ * signature are conflicting
+ *
+ * In these cases, a MergeError is thrown.
+ *
+ * NOTE(review): the trace label below says "glb" although this is `andType`;
+ * presumably deliberate shorthand — confirm before renaming.
+ */
+ final def andType(tp1: Type, tp2: Type, erased: Boolean = ctx.erasedTypes) = ctx.traceIndented(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true) {
+ val t1 = distributeAnd(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = distributeAnd(tp2, tp1)
+ if (t2.exists) t2
+ else if (erased) erasedGlb(tp1, tp2, isJava = false)
+ else liftIfHK(tp1, tp2, AndType(_, _), _ & _)
+ }
+ }
+
+ /** Form a normalized disjunction of two types.
+ * Note: For certain types, `|` is distributed inside the type. This holds for
+ * all types which are not value types (e.g. TypeBounds, ClassInfo,
+ * ExprType, MethodType, PolyType). Also, when forming an `|`,
+ * instantiated TypeVars are dereferenced and annotations are stripped.
+ *
+ * Sometimes, the disjunction of two types cannot be formed because
+ * the types are in conflict of each other. (@see `andType` for an enumeration
+ * of these cases). In cases of conflict a `MergeError` is raised.
+ *
+ * @param erased Apply erasure semantics. If erased is true, instead of creating
+ * an OrType, the lub will be computed using TypeCreator#erasedLub.
+ */
+ final def orType(tp1: Type, tp2: Type, erased: Boolean = ctx.erasedTypes) = {
+ val t1 = distributeOr(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = distributeOr(tp2, tp1)
+ if (t2.exists) t2
+ else if (erased) erasedLub(tp1, tp2)
+ else liftIfHK(tp1, tp2, OrType(_, _), _ | _)
+ }
+ }
+
+ /** `op(tp1, tp2)` unless `tp1` and `tp2` are type-constructors with at least
+ * some unnamed type parameters.
+ * In the latter case, combine `tp1` and `tp2` under a type lambda like this:
+ *
+ * [X1, ..., Xn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn])
+ *
+ * Note: There is a tension between named and positional parameters here, which
+ * is impossible to resolve completely. Say you have
+ *
+ * C[type T], D[type U]
+ *
+ * Then do you expand `C & D` to `[T] -> C[T] & D[T]` or not? Under the named
+ * type parameter interpretation, this would be wrong whereas under the traditional
+ * higher-kinded interpretation this would be required. The problem arises from
+ * allowing both interpretations. A possible remedy is to be somehow stricter
+ * in where we allow which interpretation.
+ */
+ private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type, original: (Type, Type) => Type) = {
+ val tparams1 = tp1.typeParams
+ val tparams2 = tp2.typeParams
+ if (tparams1.isEmpty)
+ // When only one side has type parameters, eta-apply it to its own
+ // bounds so both sides are proper types, then use `original`.
+ if (tparams2.isEmpty) op(tp1, tp2)
+ else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.paramBoundsAsSeenFrom(tp2))))
+ else if (tparams2.isEmpty)
+ original(tp1.appliedTo(tp1.typeParams.map(_.paramBoundsAsSeenFrom(tp1))), tp2)
+ else
+ // Both sides are constructors: build the combining type lambda,
+ // averaging variances and intersecting the lifted bounds.
+ PolyType(
+ paramNames = tpnme.syntheticTypeParamNames(tparams1.length),
+ variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ (tparam1.paramVariance + tparam2.paramVariance) / 2))(
+ paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ tl.lifted(tparams1, tparam1.paramBoundsAsSeenFrom(tp1)).bounds &
+ tl.lifted(tparams2, tparam2.paramBoundsAsSeenFrom(tp2)).bounds),
+ resultTypeExp = tl =>
+ original(tl.lifted(tparams1, tp1).appliedTo(tl.paramRefs),
+ tl.lifted(tparams2, tp2).appliedTo(tl.paramRefs)))
+ }
+
+ /** Try to distribute `&` inside type, detect and handle conflicts
+ * @pre !(tp1 <: tp2) && !(tp2 <:< tp1) -- these cases were handled before
+ * Returns NoType when no distribution rule applies.
+ */
+ private def distributeAnd(tp1: Type, tp2: Type): Type = tp1 match {
+ // opportunistically merge same-named refinements
+ // this does not change anything semantically (i.e. merging or not merging
+ // gives =:= types), but it keeps the type smaller.
+ case tp1: RefinedType =>
+ tp2 match {
+ case tp2: RefinedType if tp1.refinedName == tp2.refinedName =>
+ // Given two refinements `T1 { X = S1 }` and `T2 { X = S2 }`, if `S1 =:= S2`
+ // (possibly by instantiating type parameters), rewrite to `T1 & T2 { X = S1 }`.
+ // Otherwise rewrite to `T1 & T2 { X B }` where `B` is the conjunction of
+ // the bounds of `X` in `T1` and `T2`.
+ // The first rule above is contentious because it cuts the constraint set.
+ // But without it we would replace the two aliases by
+ // `T { X >: S1 | S2 <: S1 & S2 }`, which looks weird and is probably
+ // not what's intended.
+ val rinfo1 = tp1.refinedInfo
+ val rinfo2 = tp2.refinedInfo
+ val parent = tp1.parent & tp2.parent
+ val rinfo =
+ if (rinfo1.isAlias && rinfo2.isAlias && isSameType(rinfo1, rinfo2))
+ rinfo1
+ else
+ rinfo1 & rinfo2
+ tp1.derivedRefinedType(parent, tp1.refinedName, rinfo)
+ case _ =>
+ NoType
+ }
+ case tp1: RecType =>
+ // Push the `&` under the recursive binder.
+ tp1.rebind(distributeAnd(tp1.parent, tp2))
+ case ExprType(rt1) =>
+ tp2 match {
+ case ExprType(rt2) =>
+ ExprType(rt1 & rt2)
+ case _ =>
+ rt1 & tp2
+ }
+ case tp1: TypeVar if tp1.isInstantiated =>
+ tp1.underlying & tp2
+ case tp1: AnnotatedType =>
+ tp1.underlying & tp2
+ case _ =>
+ NoType
+ }
+
+ /** Try to distribute `|` inside type, detect and handle conflicts
+ * Note that, unlike for `&`, a disjunction cannot be pushed into
+ * a refined or applied type. Example:
+ *
+ * List[T] | List[U] is not the same as List[T | U].
+ *
+ * The rhs is a proper supertype of the lhs.
+ * Returns NoType when no distribution rule applies.
+ */
+ private def distributeOr(tp1: Type, tp2: Type): Type = tp1 match {
+ case ExprType(rt1) =>
+ ExprType(rt1 | tp2.widenExpr)
+ case tp1: TypeVar if tp1.isInstantiated =>
+ tp1.underlying | tp2
+ case tp1: AnnotatedType =>
+ tp1.underlying | tp2
+ case _ =>
+ NoType
+ }
+
+ /** Show type, handling type types better than the default.
+ * ClassInfos and TypeBounds get a more readable rendering.
+ */
+ private def showType(tp: Type)(implicit ctx: Context) = tp match {
+ case ClassInfo(_, cls, _, _, _) => cls.showLocated
+ case bounds: TypeBounds => "type bounds" + bounds.show
+ case _ => tp.show
+ }
+
+ /** A comparison function to pick a winner in case of a merge conflict */
+ private def isAsGood(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo =>
+ isSubTypeWhenFrozen(tp1.prefix, tp2.prefix) || (tp1.cls.owner derivesFrom tp2.cls.owner)
+ case _ =>
+ false
+ }
+ case tp1: PolyType =>
+ tp2 match {
+ case tp2: PolyType =>
+ tp1.typeParams.length == tp2.typeParams.length &&
+ isAsGood(tp1.resultType, tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ false
+ }
+ case tp1: MethodType =>
+ tp2 match {
+ case tp2: MethodType =>
+ def asGoodParams(formals1: List[Type], formals2: List[Type]) =
+ (formals2 corresponds formals1)(isSubTypeWhenFrozen)
+ asGoodParams(tp1.paramTypes, tp2.paramTypes) &&
+ (!asGoodParams(tp2.paramTypes, tp1.paramTypes) ||
+ isAsGood(tp1.resultType, tp2.resultType))
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ /** A new type comparer of the same type as this one, using the given context. */
+ def copyIn(ctx: Context) = new TypeComparer(ctx)
+
+ // ----------- Diagnostics --------------------------------------------------
+
+ /** A hook for showing subtype traces. Overridden in ExplainingTypeComparer */
+ def traceIndented[T](str: String)(op: => T): T = op
+
+ private def traceInfo(tp1: Type, tp2: Type) =
+ s"${tp1.show} <:< ${tp2.show}" + {
+ if (ctx.settings.verbose.value || Config.verboseExplainSubtype) {
+ s" ${tp1.getClass}, ${tp2.getClass}" +
+ (if (frozenConstraint) " frozen" else "") +
+ (if (ctx.mode is Mode.TypevarsMissContext) " tvars-miss-ctx" else "")
+ }
+ else ""
+ }
+
+ /** Show subtype goal that led to an assertion failure */
+ def showGoal(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ println(ex"assertion failure for $tp1 <:< $tp2, frozen = $frozenConstraint")
+ def explainPoly(tp: Type) = tp match {
+ case tp: PolyParam => ctx.echo(s"polyparam ${tp.show} found in ${tp.binder.show}")
+ case tp: TypeRef if tp.symbol.exists => ctx.echo(s"typeref ${tp.show} found in ${tp.symbol.owner.show}")
+ case tp: TypeVar => ctx.echo(s"typevar ${tp.show}, origin = ${tp.origin}")
+ case _ => ctx.echo(s"${tp.show} is a ${tp.getClass}")
+ }
+ explainPoly(tp1)
+ explainPoly(tp2)
+ }
+
+ /** Record statistics about the total number of subtype checks
+ * and the number of "successful" subtype checks, i.e. checks
+ * that form part of a subtype derivation tree that's ultimately successful.
+ */
+ def recordStatistics(result: Boolean, prevSuccessCount: Int) = {
+ // Stats.record(s"isSubType ${tp1.show} <:< ${tp2.show}")
+ totalCount += 1
+ if (result) successCount += 1 else successCount = prevSuccessCount
+ if (recCount == 0) {
+ Stats.record("successful subType", successCount)
+ Stats.record("total subType", totalCount)
+ successCount = 0
+ totalCount = 0
+ }
+ }
+}
+
+object TypeComparer {
+
+ /** Show trace of comparison operations when performing `op` as result string */
+ def explained[T](op: Context => T)(implicit ctx: Context): String = {
+ val nestedCtx = ctx.fresh.setTypeComparerFn(new ExplainingTypeComparer(_))
+ op(nestedCtx)
+ nestedCtx.typeComparer.toString
+ }
+}
+
+/** A type comparer that can record traces of subtype operations */
+class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) {
+ private var indent = 0
+ private val b = new StringBuilder
+
+ private var skipped = false
+
+ override def traceIndented[T](str: String)(op: => T): T =
+ if (skipped) op
+ else {
+ indent += 2
+ b append "\n" append (" " * indent) append "==> " append str
+ val res = op
+ b append "\n" append (" " * indent) append "<== " append str append " = " append show(res)
+ indent -= 2
+ res
+ }
+
+ private def show(res: Any) = res match {
+ case res: printing.Showable if !ctx.settings.Yexplainlowlevel.value => res.show
+ case _ => String.valueOf(res)
+ }
+
+ override def isSubType(tp1: Type, tp2: Type) =
+ traceIndented(s"${show(tp1)} <:< ${show(tp2)}${if (Config.verboseExplainSubtype) s" ${tp1.getClass} ${tp2.getClass}" else ""}${if (frozenConstraint) " frozen" else ""}") {
+ super.isSubType(tp1, tp2)
+ }
+
+ override def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean =
+ traceIndented(s"hasMatchingMember(${show(tp1)} . $name, ${show(tp2.refinedInfo)}), member = ${show(tp1.member(name).info)}") {
+ super.hasMatchingMember(name, tp1, tp2)
+ }
+
+ override def lub(tp1: Type, tp2: Type) =
+ traceIndented(s"lub(${show(tp1)}, ${show(tp2)})") {
+ super.lub(tp1, tp2)
+ }
+
+ override def glb(tp1: Type, tp2: Type) =
+ traceIndented(s"glb(${show(tp1)}, ${show(tp2)})") {
+ super.glb(tp1, tp2)
+ }
+
+ override def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean =
+ traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint, constraint = ${ctx.typerState.constraint}") {
+ super.addConstraint(param, bound, fromBelow)
+ }
+
+ override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx)
+
+ override def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ def addendum = ""
+ traceIndented(i"compareHkApply $tp1, $tp2$addendum") {
+ super.compareHkApply2(tp1, tp2, tycon2, args2)
+ }
+ }
+
+ override def toString = "Subtype trace:" + { try b.toString finally b.clear() }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
new file mode 100644
index 000000000..abbacee49
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -0,0 +1,514 @@
+package dotty.tools
+package dotc
+package core
+
+import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Decorators._, Flags.JavaDefined
+import Uniques.unique
+import dotc.transform.ExplicitOuter._
+import dotc.transform.ValueClasses._
+import util.DotClass
+
+/** Erased types are:
+ *
+ * ErasedValueType
+ * TypeRef(prefix is ignored, denot is ClassDenotation)
+ * TermRef(prefix is ignored, denot is SymDenotation)
+ * JavaArrayType
+ * AnnotatedType
+ * MethodType
+ * ThisType
+ * SuperType
+ * ClassInfo (NoPrefix, ...)
+ * NoType
+ * NoPrefix
+ * WildcardType
+ * ErrorType
+ *
+ * only for isInstanceOf, asInstanceOf: PolyType, PolyParam, TypeBounds
+ *
+ */
+object TypeErasure {
+
+ /** A predicate that tests whether a type is a legal erased type. Only asInstanceOf and
+ * isInstanceOf may have types that do not satisfy the predicate.
+ * ErasedValueType is considered an erased type because it is valid after Erasure (it is
+ * eliminated by ElimErasedValueType).
+ */
+ def isErasedType(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case _: ErasedValueType =>
+ true
+ case tp: TypeRef =>
+ tp.symbol.isClass && tp.symbol != defn.AnyClass && tp.symbol != defn.ArrayClass
+ case _: TermRef =>
+ true
+ case JavaArrayType(elem) =>
+ isErasedType(elem)
+ case AnnotatedType(tp, _) =>
+ isErasedType(tp)
+ case ThisType(tref) =>
+ isErasedType(tref)
+ case tp: MethodType =>
+ tp.paramTypes.forall(isErasedType) && isErasedType(tp.resultType)
+ case tp @ ClassInfo(pre, _, parents, decls, _) =>
+ isErasedType(pre) && parents.forall(isErasedType) //&& decls.forall(sym => isErasedType(sym.info)) && isErasedType(tp.selfType)
+ case NoType | NoPrefix | WildcardType | ErrorType | SuperType(_, _) =>
+ true
+ case _ =>
+ false
+ }
+
+ /** A type representing the semi-erasure of a derived value class, see SIP-15
+ * where it's called "C$unboxed" for a class C.
+ * Derived value classes are erased to this type during Erasure (when
+ * semiEraseVCs = true) and subsequently erased to their underlying type
+ * during ElimErasedValueType. This type is outside the normal Scala class
+ * hierarchy: it is a subtype of no other type and is a supertype only of
+ * Nothing. This is because this type is only useful for type adaptation (see
+ * [[Erasure.Boxing#adaptToType]]).
+ *
+ * @param tycon A TypeRef referring to the value class symbol
+ * @param erasedUnderlying The erased type of the single field of the value class
+ */
+ abstract case class ErasedValueType(tycon: TypeRef, erasedUnderlying: Type)
+ extends CachedGroundType with ValueType {
+ override def computeHash = doHash(tycon, erasedUnderlying)
+ }
+
+ final class CachedErasedValueType(tycon: TypeRef, erasedUnderlying: Type)
+ extends ErasedValueType(tycon, erasedUnderlying)
+
+ object ErasedValueType {
+ def apply(tycon: TypeRef, erasedUnderlying: Type)(implicit ctx: Context) = {
+ unique(new CachedErasedValueType(tycon, erasedUnderlying))
+ }
+ }
+
+ private def erasureIdx(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean, wildcardOK: Boolean) =
+ (if (isJava) 1 else 0) +
+ (if (semiEraseVCs) 2 else 0) +
+ (if (isConstructor) 4 else 0) +
+ (if (wildcardOK) 8 else 0)
+
+ private val erasures = new Array[TypeErasure](16)
+
+ for {
+ isJava <- List(false, true)
+ semiEraseVCs <- List(false, true)
+ isConstructor <- List(false, true)
+ wildcardOK <- List(false, true)
+ } erasures(erasureIdx(isJava, semiEraseVCs, isConstructor, wildcardOK)) =
+ new TypeErasure(isJava, semiEraseVCs, isConstructor, wildcardOK)
+
+ /** Produces an erasure function. See the documentation of the class [[TypeErasure]]
+ * for a description of each parameter.
+ */
+ private def erasureFn(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean, wildcardOK: Boolean): TypeErasure =
+ erasures(erasureIdx(isJava, semiEraseVCs, isConstructor, wildcardOK))
+
+ /** The current context with a phase no later than erasure */
+ private def erasureCtx(implicit ctx: Context) =
+ if (ctx.erasedTypes) ctx.withPhase(ctx.erasurePhase) else ctx
+
+ /** The standard erasure of a Scala type. Value classes are erased as normal classes.
+ *
+ * @param tp The type to erase.
+ */
+ def erasure(tp: Type)(implicit ctx: Context): Type =
+ erasureFn(isJava = false, semiEraseVCs = false, isConstructor = false, wildcardOK = false)(tp)(erasureCtx)
+
+ /** The value class erasure of a Scala type, where value classes are semi-erased to
+ * ErasedValueType (they will be fully erased in [[ElimErasedValueType]]).
+ *
+ * @param tp The type to erase.
+ */
+ def valueErasure(tp: Type)(implicit ctx: Context): Type =
+ erasureFn(isJava = false, semiEraseVCs = true, isConstructor = false, wildcardOK = false)(tp)(erasureCtx)
+
+ def sigName(tp: Type, isJava: Boolean)(implicit ctx: Context): TypeName = {
+ val normTp =
+ if (tp.isRepeatedParam) {
+ val seqClass = if (isJava) defn.ArrayClass else defn.SeqClass
+ tp.translateParameterized(defn.RepeatedParamClass, seqClass)
+ }
+ else tp
+ val erase = erasureFn(isJava, semiEraseVCs = false, isConstructor = false, wildcardOK = true)
+ erase.sigName(normTp)(erasureCtx)
+ }
+
+ /** The erasure of a top-level reference. Differs from normal erasure in that
+ * TermRefs are kept instead of being widened away.
+ */
+ def erasedRef(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: TermRef =>
+ assert(tp.symbol.exists, tp)
+ val tp1 = ctx.makePackageObjPrefixExplicit(tp)
+ if (tp1 ne tp) erasedRef(tp1)
+ else TermRef(erasedRef(tp.prefix), tp.symbol.asTerm)
+ case tp: ThisType =>
+ tp
+ case tp =>
+ valueErasure(tp)
+ }
+
+ /** The symbol's erased info. This is the type's erasure, except for the following symbols:
+ *
+ * - For $asInstanceOf : [T]T
+ * - For $isInstanceOf : [T]Boolean
+ * - For all abstract types : = ?
+ * - For companion methods : the erasure of their type with semiEraseVCs = false.
+ * The signature of these methods are used to keep a
+ * link between companions and should not be semi-erased.
+ * - For Java-defined symbols: : the erasure of their type with isJava = true,
+ * semiEraseVCs = false. Semi-erasure never happens in Java.
+ * - For all other symbols : the semi-erasure of their types, with
+ * isJava, isConstructor set according to symbol.
+ */
+ def transformInfo(sym: Symbol, tp: Type)(implicit ctx: Context): Type = {
+ val isJava = sym is JavaDefined
+ val semiEraseVCs = !isJava && !sym.isCompanionMethod
+ val erase = erasureFn(isJava, semiEraseVCs, sym.isConstructor, wildcardOK = false)
+
+ def eraseParamBounds(tp: PolyType): Type =
+ tp.derivedPolyType(
+ tp.paramNames, tp.paramNames map (Function.const(TypeBounds.upper(defn.ObjectType))), tp.resultType)
+
+ if (defn.isPolymorphicAfterErasure(sym)) eraseParamBounds(sym.info.asInstanceOf[PolyType])
+ else if (sym.isAbstractType) TypeAlias(WildcardType)
+ else if (sym.isConstructor) outer.addParam(sym.owner.asClass, erase(tp)(erasureCtx))
+ else erase.eraseInfo(tp, sym)(erasureCtx) match {
+ case einfo: MethodType if sym.isGetter && einfo.resultType.isRef(defn.UnitClass) =>
+ MethodType(Nil, defn.BoxedUnitType)
+ case einfo =>
+ einfo
+ }
+ }
+
+ /** Is `tp` an abstract type or polymorphic type parameter that has `Any`, `AnyVal`,
+ * or a universal trait as upper bound and that is not Java defined? Arrays of such types are
+ * erased to `Object` instead of `Object[]`.
+ */
+ def isUnboundedGeneric(tp: Type)(implicit ctx: Context): Boolean = tp.dealias match {
+ case tp: TypeRef =>
+ !tp.symbol.isClass &&
+ !tp.derivesFrom(defn.ObjectClass) &&
+ !tp.symbol.is(JavaDefined)
+ case tp: PolyParam =>
+ !tp.derivesFrom(defn.ObjectClass) &&
+ !tp.binder.resultType.isInstanceOf[JavaMethodType]
+ case tp: TypeAlias => isUnboundedGeneric(tp.alias)
+ case tp: TypeBounds => !tp.hi.derivesFrom(defn.ObjectClass)
+ case tp: TypeProxy => isUnboundedGeneric(tp.underlying)
+ case tp: AndType => isUnboundedGeneric(tp.tp1) || isUnboundedGeneric(tp.tp2)
+ case tp: OrType => isUnboundedGeneric(tp.tp1) && isUnboundedGeneric(tp.tp2)
+ case _ => false
+ }
+
+ /** The erased least upper bound is computed as follows
+ *  - if both arguments are arrays of objects, an array of the lub of the element types
+ * - if both arguments are arrays of same primitives, an array of this primitive
+ * - if one argument is array of primitives and the other is array of objects, Object
+ * - if one argument is an array, Object
+ * - otherwise a common superclass or trait S of the argument classes, with the
+ * following two properties:
+ *      S is minimal: no other common superclass or trait derives from S
+ * S is last : in the linearization of the first argument type `tp1`
+ * there are no minimal common superclasses or traits that
+ * come after S.
+ * (the reason to pick last is that we prefer classes over traits that way).
+ */
+ def erasedLub(tp1: Type, tp2: Type)(implicit ctx: Context): Type = tp1 match {
+ case JavaArrayType(elem1) =>
+ import dotty.tools.dotc.transform.TypeUtils._
+ tp2 match {
+ case JavaArrayType(elem2) =>
+ if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) {
+ if (elem1.classSymbol eq elem2.classSymbol) // same primitive
+ JavaArrayType(elem1)
+ else defn.ObjectType
+ } else JavaArrayType(erasedLub(elem1, elem2))
+ case _ => defn.ObjectType
+ }
+ case _ =>
+ tp2 match {
+ case JavaArrayType(_) => defn.ObjectType
+ case _ =>
+ val cls2 = tp2.classSymbol
+ def loop(bcs: List[ClassSymbol], bestSoFar: ClassSymbol): ClassSymbol = bcs match {
+ case bc :: bcs1 =>
+ if (cls2.derivesFrom(bc))
+ if (!bc.is(Trait) && bc != defn.AnyClass) bc
+ else loop(bcs1, if (bestSoFar.derivesFrom(bc)) bestSoFar else bc)
+ else
+ loop(bcs1, bestSoFar)
+ case nil =>
+ bestSoFar
+ }
+ val t = loop(tp1.baseClasses, defn.ObjectClass)
+ if (t eq defn.AnyValClass)
+ // while AnyVal is a valid common super class for primitives it does not exist after erasure
+ defn.ObjectType
+ else t.typeRef
+ }
+ }
+
+ /** The erased greatest lower bound picks one of the two argument types. It prefers, in this order:
+ * - arrays over non-arrays
+ * - subtypes over supertypes, unless isJava is set
+ * - real classes over traits
+ */
+ def erasedGlb(tp1: Type, tp2: Type, isJava: Boolean)(implicit ctx: Context): Type = tp1 match {
+ case JavaArrayType(elem1) =>
+ tp2 match {
+ case JavaArrayType(elem2) => JavaArrayType(erasedGlb(elem1, elem2, isJava))
+ case _ => tp1
+ }
+ case _ =>
+ tp2 match {
+ case JavaArrayType(_) => tp2
+ case _ =>
+ val tsym1 = tp1.typeSymbol
+ val tsym2 = tp2.typeSymbol
+ if (!tsym2.exists) tp1
+ else if (!tsym1.exists) tp2
+ else if (!isJava && tsym1.derivesFrom(tsym2)) tp1
+ else if (!isJava && tsym2.derivesFrom(tsym1)) tp2
+ else if (tp1.typeSymbol.isRealClass) tp1
+ else if (tp2.typeSymbol.isRealClass) tp2
+ else tp1
+ }
+ }
+
+ /** Does the (possibly generic) type `tp` have the same erasure in all its
+ * possible instantiations?
+ */
+ def hasStableErasure(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp: TypeRef =>
+ tp.info match {
+ case TypeAlias(alias) => hasStableErasure(alias)
+ case _: ClassInfo => true
+ case _ => false
+ }
+ case tp: PolyParam => false
+ case tp: TypeProxy => hasStableErasure(tp.superType)
+ case tp: AndOrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2)
+ case _ => false
+ }
+}
+import TypeErasure._
+
+/**
+ * @param isJava Arguments should be treated the way Java does it
+ * @param semiEraseVCs If true, value classes are semi-erased to ErasedValueType
+ * (they will be fully erased in [[ElimErasedValueType]]).
+ * If false, they are erased like normal classes.
+ * @param isConstructor Argument forms part of the type of a constructor
+ * @param wildcardOK Wildcards are acceptable (true when using the erasure
+ * for computing a signature name).
+ */
+class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean, wildcardOK: Boolean) extends DotClass {
+
+ /** The erasure |T| of a type T. This is:
+ *
+ * - For a refined type scala.Array+[T]:
+ * - if T is Nothing or Null, []Object
+ * - otherwise, if T <: Object, []|T|
+ *     - otherwise, if T is a type parameter coming from Java, []Object
+ * - otherwise, Object
+ * - For a term ref p.x, the type <noprefix> # x.
+ * - For a typeref scala.Any, scala.AnyVal or scala.Singleton: |java.lang.Object|
+ * - For a typeref scala.Unit, |scala.runtime.BoxedUnit|.
+ * - For a typeref P.C where C refers to a class, <noprefix> # C.
+ * - For a typeref P.C where C refers to an alias type, the erasure of C's alias.
+ * - For a typeref P.C where C refers to an abstract type, the erasure of C's upper bound.
+ * - For a this-type C.this, the type itself.
+ * - For all other type proxies: The erasure of the underlying type.
+ * - For T1 & T2, the erased glb of |T1| and |T2| (see erasedGlb)
+ * - For T1 | T2, the first base class in the linearization of T which is also a base class of T2
+ * - For => T, ()T
+ * - For a method type (Fs)scala.Unit, (|Fs|)scala.Unit.
+ * - For any other uncurried method type (Fs)T, (|Fs|)|T|.
+ * - For a curried method type (Fs1)(Fs2)T, (|Fs1|,Es2)ET where (Es2)ET = |(Fs2)T|.
+ * - For a polymorphic type [Ts](Ps)T, |(Ps)T|
+ *   - For a polymorphic type [Ts]T where T is not a method type, ()|T|
+ * - For the class info type of java.lang.Object, the same type without any parents.
+ * - For a class info type of a value class, the same type without any parents.
+ * - For any other class info type with parents Ps, the same type with
+ * parents |Ps|, but with duplicate references of Object removed.
+ * - For NoType or NoPrefix, the type itself.
+ * - For any other type, exception.
+ */
+ private def apply(tp: Type)(implicit ctx: Context): Type = tp match {
+ case _: ErasedValueType =>
+ tp
+ case tp: TypeRef =>
+ val sym = tp.symbol
+ if (!sym.isClass) this(tp.info)
+ else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClassRef(tp)
+ else if (sym == defn.ArrayClass) apply(tp.appliedTo(TypeBounds.empty)) // i966 shows that we can hit a raw Array type.
+ else eraseNormalClassRef(tp)
+ case tp: RefinedType =>
+ val parent = tp.parent
+ if (parent isRef defn.ArrayClass) eraseArray(tp)
+ else this(parent)
+ case _: TermRef | _: ThisType =>
+ this(tp.widen)
+ case SuperType(thistpe, supertpe) =>
+ SuperType(this(thistpe), this(supertpe))
+ case ExprType(rt) =>
+ defn.FunctionClass(0).typeRef
+ case AndType(tp1, tp2) =>
+ erasedGlb(this(tp1), this(tp2), isJava)
+ case OrType(tp1, tp2) =>
+ ctx.typeComparer.orType(this(tp1), this(tp2), erased = true)
+ case tp: MethodType =>
+ def paramErasure(tpToErase: Type) =
+ erasureFn(tp.isJava, semiEraseVCs, isConstructor, wildcardOK)(tpToErase)
+ val formals = tp.paramTypes.mapConserve(paramErasure)
+ eraseResult(tp.resultType) match {
+ case rt: MethodType =>
+ tp.derivedMethodType(tp.paramNames ++ rt.paramNames, formals ++ rt.paramTypes, rt.resultType)
+ case rt =>
+ tp.derivedMethodType(tp.paramNames, formals, rt)
+ }
+ case tp @ ClassInfo(pre, cls, classParents, decls, _) =>
+ if (cls is Package) tp
+ else {
+ def eraseTypeRef(p: TypeRef) = this(p).asInstanceOf[TypeRef]
+ val parents: List[TypeRef] =
+ if ((cls eq defn.ObjectClass) || cls.isPrimitiveValueClass) Nil
+ else classParents.mapConserve(eraseTypeRef) match {
+ case tr :: trs1 =>
+ assert(!tr.classSymbol.is(Trait), cls)
+ val tr1 = if (cls is Trait) defn.ObjectType else tr
+ tr1 :: trs1.filterNot(_ isRef defn.ObjectClass)
+ case nil => nil
+ }
+ val erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass)
+ tp.derivedClassInfo(NoPrefix, parents, erasedDecls, erasedRef(tp.selfType))
+ // can't replace selftype by NoType because this would lose the sourceModule link
+ }
+ case NoType | NoPrefix | ErrorType | JavaArrayType(_) =>
+ tp
+ case tp: WildcardType if wildcardOK =>
+ tp
+ case tp: TypeProxy =>
+ this(tp.underlying)
+ }
+
+ private def eraseArray(tp: RefinedType)(implicit ctx: Context) = {
+ val defn.ArrayOf(elemtp) = tp
+ def arrayErasure(tpToErase: Type) =
+ erasureFn(isJava, semiEraseVCs = false, isConstructor, wildcardOK)(tpToErase)
+ if (elemtp derivesFrom defn.NullClass) JavaArrayType(defn.ObjectType)
+ else if (isUnboundedGeneric(elemtp) && !isJava) defn.ObjectType
+ else JavaArrayType(arrayErasure(elemtp))
+ }
+
+ /** The erasure of a symbol's info. This is different from `apply` in the way `ExprType`s and
+ *  `PolyType`s are treated. `eraseInfo` maps them to method types, whereas `apply` maps them
+ * to the underlying type.
+ */
+ def eraseInfo(tp: Type, sym: Symbol)(implicit ctx: Context) = tp match {
+ case ExprType(rt) =>
+ if (sym is Param) apply(tp)
+ // Note that params with ExprTypes are eliminated by ElimByName,
+ // but potentially re-introduced by ResolveSuper, when we add
+ // forwarders to mixin methods.
+ // See doc comment for ElimByName for speculation how we could improve this.
+ else MethodType(Nil, Nil, eraseResult(rt))
+ case tp: PolyType =>
+ eraseResult(tp.resultType) match {
+ case rt: MethodType => rt
+ case rt => MethodType(Nil, Nil, rt)
+ }
+ case tp => this(tp)
+ }
+
+ private def eraseDerivedValueClassRef(tref: TypeRef)(implicit ctx: Context): Type = {
+ val cls = tref.symbol.asClass
+ val underlying = underlyingOfValueClass(cls)
+ if (underlying.exists) ErasedValueType(tref, valueErasure(underlying))
+ else NoType
+ }
+
+ private def eraseNormalClassRef(tref: TypeRef)(implicit ctx: Context): Type = {
+ val cls = tref.symbol.asClass
+ (if (cls.owner is Package) normalizeClass(cls) else cls).typeRef
+ }
+
+ /** The erasure of a function result type. */
+ private def eraseResult(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: TypeRef =>
+ val sym = tp.typeSymbol
+ if (sym eq defn.UnitClass) sym.typeRef
+ // For a value class V, "new V(x)" should have type V for type adaptation to work
+ // correctly (see SIP-15 and [[Erasure.Boxing.adaptToType]]), so the return type of a
+ // constructor method should not be semi-erased.
+ else if (isConstructor && isDerivedValueClass(sym)) eraseNormalClassRef(tp)
+ else this(tp)
+ case RefinedType(parent, _, _) if !(parent isRef defn.ArrayClass) =>
+ eraseResult(parent)
+ case _ =>
+ this(tp)
+ }
+
+ private def normalizeClass(cls: ClassSymbol)(implicit ctx: Context): ClassSymbol = {
+ if (cls.owner == defn.ScalaPackageClass) {
+ if (cls == defn.AnyClass || cls == defn.AnyValClass || cls == defn.SingletonClass)
+ return defn.ObjectClass
+ if (cls == defn.UnitClass)
+ return defn.BoxedUnitClass
+ }
+ cls
+ }
+
+ /** The name of the type as it is used in `Signature`s.
+ * Need to ensure correspondence with erasure!
+ */
+  /** The name of the type as it is used in `Signature`s.
+   *  Need to ensure correspondence with erasure!
+   */
+  private def sigName(tp: Type)(implicit ctx: Context): TypeName = try {
+    tp match {
+      case ErasedValueType(_, underlying) =>
+        sigName(underlying)
+      case tp: TypeRef =>
+        if (!tp.denot.exists) throw new MissingType(tp.prefix, tp.name)
+        val sym = tp.symbol
+        if (!sym.isClass) {
+          val info = tp.info
+          // Fixed: the message was a plain literal "$tp with symbol $sym"; it needs the
+          // `s` interpolator so the failing type and symbol actually appear in the message.
+          if (!info.exists) assert(false, s"undefined: $tp with symbol $sym")
+          return sigName(info)
+        }
+        if (isDerivedValueClass(sym)) {
+          val erasedVCRef = eraseDerivedValueClassRef(tp)
+          if (erasedVCRef.exists) return sigName(erasedVCRef)
+        }
+        normalizeClass(sym.asClass).fullName.asTypeName
+      case defn.ArrayOf(elem) =>
+        sigName(this(tp))
+      case JavaArrayType(elem) =>
+        sigName(elem) ++ "[]"
+      case tp: TermRef =>
+        sigName(tp.widen)
+      case ExprType(rt) =>
+        sigName(defn.FunctionOf(Nil, rt))
+      case tp: TypeVar =>
+        val inst = tp.instanceOpt
+        if (inst.exists) sigName(inst) else tpnme.Uninstantiated
+      case tp: TypeProxy =>
+        sigName(tp.underlying)
+      case ErrorType | WildcardType =>
+        tpnme.WILDCARD
+      case tp: WildcardType =>
+        sigName(tp.optBounds)
+      case _ =>
+        val erased = this(tp)
+        assert(erased ne tp, tp)
+        sigName(erased)
+    }
+  } catch {
+    case ex: AssertionError =>
+      println(s"no sig for $tp")
+      throw ex
+  }
+
+
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
new file mode 100644
index 000000000..92e5f9d57
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
@@ -0,0 +1,554 @@
+package dotty.tools
+package dotc
+package core
+
+import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
+import SymDenotations._, Denotations.SingleDenotation
+import config.Printers.typr
+import util.Positions._
+import NameOps._
+import Decorators._
+import StdNames._
+import Annotations._
+import util.SimpleMap
+import collection.mutable
+import ast.tpd._
+
+trait TypeOps { this: Context => // TODO: Make standalone object.
+
+ /** The type `tp` as seen from prefix `pre` and owner `cls`. See the spec
+ * for what this means. Called very often, so the code is optimized heavily.
+ *
+ * A tricky aspect is what to do with unstable prefixes. E.g. say we have a class
+ *
+ * class C { type T; def f(x: T): T }
+ *
+ * and an expression `e` of type `C`. Then computing the type of `e.f` leads
+ * to the query asSeenFrom(`C`, `(x: T)T`). What should its result be? The
+ * naive answer `(x: C#T)C#T` is incorrect given that we treat `C#T` as the existential
+ * `exists(c: C)c.T`. What we need to do instead is to skolemize the existential. So
+ * the answer would be `(x: c.T)c.T` for some (unknown) value `c` of type `C`.
+ * `c.T` is expressed in the compiler as a skolem type `Skolem(C)`.
+ *
+ * Now, skolemization is messy and expensive, so we want to do it only if we absolutely
+ * must. Also, skolemizing immediately would mean that asSeenFrom was no longer
+ * idempotent - each call would return a type with a different skolem.
+ * Instead we produce an annotated type that marks the prefix as unsafe:
+ *
+ * (x: (C @ UnsafeNonvariant)#T)C#T
+ *
+ * We also set a global state flag `unsafeNonvariant` to the current run.
+ * When typing a Select node, typer will check that flag, and if it
+ * points to the current run will scan the result type of the select for
+ * @UnsafeNonvariant annotations. If it finds any, it will introduce a skolem
+ * constant for the prefix and try again.
+ *
+ * The scheme is efficient in particular because we expect that unsafe situations are rare;
+ * most compiles would contain none, so no scanning would be necessary.
+ */
+ final def asSeenFrom(tp: Type, pre: Type, cls: Symbol): Type =
+ asSeenFrom(tp, pre, cls, null)
+
+ /** Helper method, taking a map argument which is instantiated only for more
+ * complicated cases of asSeenFrom.
+ */
+ private def asSeenFrom(tp: Type, pre: Type, cls: Symbol, theMap: AsSeenFromMap): Type = {
+
+ /** Map a `C.this` type to the right prefix. If the prefix is unstable and
+ * the `C.this` occurs in nonvariant or contravariant position, mark the map
+ * to be unstable.
+ */
+ def toPrefix(pre: Type, cls: Symbol, thiscls: ClassSymbol): Type = /*>|>*/ ctx.conditionalTraceIndented(TypeOps.track, s"toPrefix($pre, $cls, $thiscls)") /*<|<*/ {
+ if ((pre eq NoType) || (pre eq NoPrefix) || (cls is PackageClass))
+ tp
+ else pre match {
+ case pre: SuperType => toPrefix(pre.thistpe, cls, thiscls)
+ case _ =>
+ if (thiscls.derivesFrom(cls) && pre.baseTypeRef(thiscls).exists) {
+ if (theMap != null && theMap.currentVariance <= 0 && !isLegalPrefix(pre)) {
+ ctx.base.unsafeNonvariant = ctx.runId
+ AnnotatedType(pre, Annotation(defn.UnsafeNonvariantAnnot, Nil))
+ }
+ else pre
+ }
+ else if ((pre.termSymbol is Package) && !(thiscls is Package))
+ toPrefix(pre.select(nme.PACKAGE), cls, thiscls)
+ else
+ toPrefix(pre.baseTypeRef(cls).normalizedPrefix, cls.owner, thiscls)
+ }
+ }
+
+ /*>|>*/ ctx.conditionalTraceIndented(TypeOps.track, s"asSeen ${tp.show} from (${pre.show}, ${cls.show})", show = true) /*<|<*/ { // !!! DEBUG
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (sym.isStatic) tp
+ else {
+ val pre1 = asSeenFrom(tp.prefix, pre, cls, theMap)
+ if (pre1.isUnsafeNonvariant)
+ pre1.member(tp.name).info match {
+ case TypeAlias(alias) =>
+                // Try to follow aliases, as this will avoid skolemization.
+ return alias
+ case _ =>
+ }
+ tp.derivedSelect(pre1)
+ }
+ case tp: ThisType =>
+ toPrefix(pre, cls, tp.cls)
+ case _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(
+ asSeenFrom(tp.parent, pre, cls, theMap),
+ tp.refinedName,
+ asSeenFrom(tp.refinedInfo, pre, cls, theMap))
+ case tp: TypeAlias if tp.variance == 1 => // if variance != 1, need to do the variance calculation
+ tp.derivedTypeAlias(asSeenFrom(tp.alias, pre, cls, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new AsSeenFromMap(pre, cls))
+ .mapOver(tp)
+ }
+ }
+ }
+
+ private def isLegalPrefix(pre: Type)(implicit ctx: Context) =
+ pre.isStable || !ctx.phase.isTyper
+
+ /** The TypeMap handling the asSeenFrom in more complicated cases */
+ class AsSeenFromMap(pre: Type, cls: Symbol) extends TypeMap {
+ def apply(tp: Type) = asSeenFrom(tp, pre, cls, this)
+
+ /** A method to export the current variance of the map */
+ def currentVariance = variance
+ }
+
+ /** Approximate a type `tp` with a type that does not contain skolem types. */
+ object deskolemize extends ApproximatingTypeMap {
+ private var seen: Set[SkolemType] = Set()
+ def apply(tp: Type) = tp match {
+ case tp: SkolemType =>
+ if (seen contains tp) NoType
+ else {
+ val saved = seen
+ seen += tp
+ try approx(hi = tp.info)
+ finally seen = saved
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** Implementation of Types#simplified */
+ final def simplify(tp: Type, theMap: SimplifyMap): Type = tp match {
+ case tp: NamedType =>
+ if (tp.symbol.isStatic) tp
+ else tp.derivedSelect(simplify(tp.prefix, theMap)) match {
+ case tp1: NamedType if tp1.denotationIsCurrent =>
+ val tp2 = tp1.reduceProjection
+ //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2")
+ tp2
+ case tp1 => tp1
+ }
+ case tp: PolyParam =>
+ typerState.constraint.typeVarOfParam(tp) orElse tp
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(simplify(tp.alias, theMap))
+ case AndType(l, r) =>
+ simplify(l, theMap) & simplify(r, theMap)
+ case OrType(l, r) =>
+ simplify(l, theMap) | simplify(r, theMap)
+ case _ =>
+ (if (theMap != null) theMap else new SimplifyMap).mapOver(tp)
+ }
+
+ class SimplifyMap extends TypeMap {
+ def apply(tp: Type) = simplify(tp, this)
+ }
+
+ /** Approximate union type by intersection of its dominators.
+   *  That is, replace a union type T1 | ... | Tn
+ * by the smallest intersection type of base-class instances of T1,...,Tn.
+ * Example: Given
+ *
+ * trait C[+T]
+ * trait D
+ * class A extends C[A] with D
+ * class B extends C[B] with D with E
+ *
+ * we approximate `A | B` by `C[A | B] with D`
+ */
+ def orDominator(tp: Type): Type = {
+
+ /** a faster version of cs1 intersect cs2 */
+ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = {
+ val cs2AsSet = new util.HashSet[ClassSymbol](100)
+ cs2.foreach(cs2AsSet.addEntry)
+ cs1.filter(cs2AsSet.contains)
+ }
+
+ /** The minimal set of classes in `cs` which derive all other classes in `cs` */
+ def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match {
+ case c :: rest =>
+ val accu1 = if (accu exists (_ derivesFrom c)) accu else c :: accu
+ if (cs == c.baseClasses) accu1 else dominators(rest, accu1)
+ }
+
+ def mergeRefined(tp1: Type, tp2: Type): Type = {
+ def fail = throw new AssertionError(i"Failure to join alternatives $tp1 and $tp2")
+ tp1 match {
+ case tp1 @ RefinedType(parent1, name1, rinfo1) =>
+ tp2 match {
+ case RefinedType(parent2, `name1`, rinfo2) =>
+ tp1.derivedRefinedType(
+ mergeRefined(parent1, parent2), name1, rinfo1 | rinfo2)
+ case _ => fail
+ }
+ case tp1 @ TypeRef(pre1, name1) =>
+ tp2 match {
+ case tp2 @ TypeRef(pre2, `name1`) =>
+ tp1.derivedSelect(pre1 | pre2)
+ case _ => fail
+ }
+ case _ => fail
+ }
+ }
+
+ def approximateOr(tp1: Type, tp2: Type): Type = {
+ def isClassRef(tp: Type): Boolean = tp match {
+ case tp: TypeRef => tp.symbol.isClass
+ case tp: RefinedType => isClassRef(tp.parent)
+ case _ => false
+ }
+
+ tp1 match {
+ case tp1: RecType =>
+ tp1.rebind(approximateOr(tp1.parent, tp2))
+ case tp1: TypeProxy if !isClassRef(tp1) =>
+ orDominator(tp1.superType | tp2)
+ case _ =>
+ tp2 match {
+ case tp2: RecType =>
+ tp2.rebind(approximateOr(tp1, tp2.parent))
+ case tp2: TypeProxy if !isClassRef(tp2) =>
+ orDominator(tp1 | tp2.superType)
+ case _ =>
+ val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect)
+ val doms = dominators(commonBaseClasses, Nil)
+ def baseTp(cls: ClassSymbol): Type = {
+ val base =
+ if (tp1.typeParams.nonEmpty) tp.baseTypeRef(cls)
+ else tp.baseTypeWithArgs(cls)
+ base.mapReduceOr(identity)(mergeRefined)
+ }
+ doms.map(baseTp).reduceLeft(AndType.apply)
+ }
+ }
+ }
+
+ tp match {
+ case tp: OrType =>
+ approximateOr(tp.tp1, tp.tp2)
+ case _ =>
+ tp
+ }
+ }
+
+  /** Given a disjunction T1 | ... | Tn of types with potentially embedded
+   *  type variables, constrain type variables further if this eliminates
+   *  some of the branches of the disjunction. Do this also for disjunctions
+   *  embedded in intersections, as parents in refinements, and in recursive types.
+   *
+   *  For instance, if `A` is an unconstrained type variable, then
+   *
+   *      ArrayBuffer[Int] | ArrayBuffer[A]
+   *
+   *  is approximated by constraining `A` to be =:= to `Int` and returning `ArrayBuffer[Int]`
+   *  instead of `ArrayBuffer[_ >: Int | A <: Int & A]`
+   */
+  def harmonizeUnion(tp: Type): Type = tp match {
+    case tp: OrType =>
+      // Recompute the `|` under `fluidly` so type variables may be constrained;
+      // in Scala-2 mode, additionally replace the or-type by its join.
+      joinIfScala2(typeComparer.fluidly(tp.tp1 | tp.tp2))
+    case tp @ AndType(tp1, tp2) =>
+      tp derived_& (harmonizeUnion(tp1), harmonizeUnion(tp2))
+    case tp: RefinedType =>
+      // Only the parent is harmonized; the refined info is left as is.
+      tp.derivedRefinedType(harmonizeUnion(tp.parent), tp.refinedName, tp.refinedInfo)
+    case tp: RecType =>
+      tp.rebind(harmonizeUnion(tp.parent))
+    case _ =>
+      tp
+  }
+
+  /** Under -language:Scala2: Replace or-types with their joins.
+   *  Leaves every other type (and or-types outside Scala-2 mode) unchanged.
+   */
+  private def joinIfScala2(tp: Type) = tp match {
+    case or: OrType if scala2Mode => or.join
+    case other => other
+  }
+
+ /** Not currently needed:
+ *
+ def liftToRec(f: (Type, Type) => Type)(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ def f2(tp1: Type, tp2: Type): Type = tp2 match {
+ case tp2: RecType => tp2.rebind(f(tp1, tp2.parent))
+ case _ => f(tp1, tp2)
+ }
+ tp1 match {
+ case tp1: RecType => tp1.rebind(f2(tp1.parent, tp2))
+ case _ => f2(tp1, tp2)
+ }
+ }
+ */
+
+  /** Create a type-argument binding symbol for `formal` with info `info`
+   *  in class `cls`, and enter it into scope `decls`.
+   */
+  private def enterArgBinding(formal: Symbol, info: Type, cls: ClassSymbol, decls: Scope) = {
+    val lazyInfo = new LazyType { // needed so we do not force `formal`.
+      def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+        // Copy over only the flags that type-argument bindings retain.
+        denot setFlag formal.flags & RetainedTypeArgFlags
+        denot.info = info
+      }
+    }
+    val sym = ctx.newSymbol(
+      cls, formal.name,
+      // flagsUNSAFE: read flags without completing `formal` (see LazyType above).
+      formal.flagsUNSAFE & RetainedTypeArgFlags | BaseTypeArg | Override,
+      lazyInfo,
+      coord = cls.coord)
+    cls.enter(sym, decls)
+  }
+
+  /** If `tpe` is of the form `p.x` where `p` refers to a package
+   *  but `x` is not owned by a package, expand it to
+   *
+   *      p.package.x
+   */
+  def makePackageObjPrefixExplicit(tpe: NamedType): Type = {
+    def tryInsert(pkgClass: SymDenotation): Type = pkgClass match {
+      case pkgCls: PackageClassDenotation if !(tpe.symbol.maybeOwner is Package) =>
+        // `x` lives in the package object, not the package itself:
+        // reroute the selection through the package object's value.
+        tpe.derivedSelect(pkgCls.packageObj.valRef)
+      case _ =>
+        tpe
+    }
+    // The prefix may name the package either as a ThisType or as a TermRef.
+    tpe.prefix match {
+      case pre: ThisType if pre.cls is Package => tryInsert(pre.cls)
+      case pre: TermRef if pre.symbol is Package => tryInsert(pre.symbol.moduleClass)
+      case _ => tpe
+    }
+  }
+
+  /** If we have member definitions
+   *
+   *      type argSym v= from
+   *      type from v= to
+   *
+   *  where the variances of both alias are the same, then enter a new definition
+   *
+   *      type argSym v= to
+   *
+   *  unless a definition for `argSym` already exists in the current scope.
+   */
+  def forwardRef(argSym: Symbol, from: Symbol, to: TypeBounds, cls: ClassSymbol, decls: Scope) =
+    argSym.info match {
+      case info @ TypeBounds(lo2 @ TypeRef(_: ThisType, name), hi2) =>
+        if (name == from.name &&            // argSym aliases `from` ...
+            (lo2 eq hi2) &&                 // ... via an exact alias (lo == hi)
+            info.variance == to.variance && // variances agree
+            !decls.lookup(argSym.name).exists) { // no competing definition in scope
+          // println(s"short-circuit ${argSym.name} was: ${argSym.info}, now: $to")
+          enterArgBinding(argSym, to, cls, decls)
+        }
+      case _ =>
+    }
+
+
+  /** Normalize a list of parent types of class `cls` that may contain refinements
+   *  to a list of typerefs referring to classes, by converting all refinements to member
+   *  definitions in scope `decls`. Can add members to `decls` as a side-effect.
+   */
+  def normalizeToClassRefs(parents: List[Type], cls: ClassSymbol, decls: Scope): List[TypeRef] = {
+
+    /** If we just entered the type argument binding
+     *
+     *      type From = To
+     *
+     *  and there is a type argument binding in a parent in `prefs` of the form
+     *
+     *      type X = From
+     *
+     *  then also add the binding
+     *
+     *      type X = To
+     *
+     *  to the current scope, provided (1) variances of both aliases are the same, and
+     *  (2) X is not yet defined in current scope. This "short-circuiting" prevents
+     *  long chains of aliases which would have to be traversed in type comparers.
+     *
+     *  Note: Test i1401.scala shows that `forwardRefs` is also necessary
+     *  for typechecking in the case where self types refer to type parameters
+     *  that are upper-bounded by subclass instances.
+     */
+    def forwardRefs(from: Symbol, to: Type, prefs: List[TypeRef]) = to match {
+      case to @ TypeBounds(lo1, hi1) if lo1 eq hi1 =>
+        // Only exact aliases (lo == hi) are forwarded.
+        for (pref <- prefs) {
+          def forward(): Unit =
+            for (argSym <- pref.decls)
+              if (argSym is BaseTypeArg)
+                forwardRef(argSym, from, to, cls, decls)
+          // If the parent's class info is still provisional, suspend the
+          // forwarding until the info is completed; otherwise do it now.
+          pref.info match {
+            case info: TempClassInfo => info.addSuspension(forward)
+            case _ => forward()
+          }
+        }
+      case _ =>
+    }
+
+    // println(s"normalizing $parents of $cls in ${cls.owner}") // !!! DEBUG
+
+    // A map consolidating all refinements arising from parent type parameters
+    var refinements: SimpleMap[TypeName, Type] = SimpleMap.Empty
+
+    // A map of all formal type parameters of base classes that get refined
+    var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter
+
+    // Strip all refinements from parent type, populating `refinements` and `formals` maps.
+    def normalizeToRef(tp: Type): TypeRef = {
+      def fail = throw new TypeError(s"unexpected parent type: $tp")
+      tp.dealias match {
+        case tp: TypeRef =>
+          tp
+        case tp @ RefinedType(tp1, name: TypeName, rinfo) =>
+          rinfo match {
+            case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) =>
+              // Don't record refinements of the form X = this.X (These can arise using named parameters).
+              typr.println(s"dropping refinement $tp")
+            case _ =>
+              // Multiple refinements of the same name are `&`-combined.
+              val prevInfo = refinements(name)
+              refinements = refinements.updated(name,
+                if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo)
+              formals = formals.updated(name, tp1.typeParamNamed(name))
+          }
+          normalizeToRef(tp1)
+        case ErrorType =>
+          defn.AnyType
+        case AnnotatedType(tpe, _) =>
+          // Annotations on parents are dropped for normalization purposes.
+          normalizeToRef(tpe)
+        case HKApply(tycon: TypeRef, args) =>
+          tycon.info match {
+            case TypeAlias(alias) => normalizeToRef(alias.appliedTo(args))
+            case _ => fail
+          }
+        case _ =>
+          fail
+      }
+    }
+
+    val parentRefs = parents map normalizeToRef
+
+    // Enter all refinements into current scope.
+    refinements foreachBinding { (name, refinedInfo) =>
+      assert(decls.lookup(name) == NoSymbol, // DEBUG
+        s"redefinition of ${decls.lookup(name).debugString} in ${cls.showLocated}")
+      enterArgBinding(formals(name), refinedInfo, cls, decls)
+    }
+    // Forward definitions in super classes that have one of the refined parameters
+    // as aliases directly to the refined info.
+    // Note that this cannot be fused with the previous loop because we now
+    // assume that all arguments have been entered in `decls`.
+    refinements foreachBinding { (name, refinedInfo) =>
+      forwardRefs(formals(name), refinedInfo, parentRefs)
+    }
+    parentRefs
+  }
+
+  /** An argument bounds violation is a triple consisting of
+   *   - the argument tree
+   *   - a string "upper" or "lower" indicating which bound is violated
+   *   - the violated bound
+   *
+   *  Produced by `boundsViolations` and consumed when reporting bounds errors.
+   */
+  type BoundsViolation = (Tree, String, Type)
+
+  /** The list of violations where arguments are not within bounds.
+   *  @param args The arguments
+   *  @param boundss The list of type bounds
+   *  @param instantiate A function that maps a bound type and the list of argument types to a resulting type.
+   *                     Needed to handle bounds that refer to other bounds.
+   */
+  def boundsViolations(args: List[Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context): List[BoundsViolation] = {
+    val argTypes = args.tpes
+    val violations = new mutable.ListBuffer[BoundsViolation]
+    for ((arg, bounds) <- args zip boundss) {
+      def checkOverlapsBounds(lo: Type, hi: Type): Unit = {
+        //println(i"instantiating ${bounds.hi} with $argTypes")
+        //println(i" = ${instantiate(bounds.hi, argTypes)}")
+        // Note that argTypes can contain a TypeBounds type for arguments that are
+        // not fully determined. In that case we need to check against the hi
+        // (resp. lo) approximation of each argument when instantiating bounds.
+        val hiBound = instantiate(bounds.hi, argTypes.mapConserve(_.bounds.hi))
+        val loBound = instantiate(bounds.lo, argTypes.mapConserve(_.bounds.lo))
+        if (!(lo <:< hiBound)) violations += ((arg, "upper", hiBound))
+        // Consistency fix: report the instantiated `loBound` that was actually
+        // checked, mirroring the upper-bound case which reports `hiBound`
+        // (previously this reported the uninstantiated `bounds.lo`).
+        if (!(loBound <:< hi)) violations += ((arg, "lower", loBound))
+      }
+      arg.tpe match {
+        case TypeBounds(lo, hi) => checkOverlapsBounds(lo, hi)
+        case tp => checkOverlapsBounds(tp, tp)
+      }
+    }
+    violations.toList
+  }
+
+  /** Is `feature` enabled in class `owner`?
+   *  This is the case if one of the following two alternatives holds:
+   *
+   *  1. The feature is imported by a named import
+   *
+   *       import owner.feature
+   *
+   *     (the feature may be bunched with others, or renamed, but wildcard imports
+   *     don't count).
+   *
+   *  2. The feature is enabled by a compiler option
+   *
+   *       - language:<prefix>feature
+   *
+   *     where <prefix> is the full name of the owner followed by a "." minus
+   *     the prefix "dotty.language.".
+   */
+  def featureEnabled(owner: ClassSymbol, feature: TermName): Boolean = {
+    // Fully qualified feature name, with the path up to (but excluding)
+    // the `language` module object.
+    def toPrefix(sym: Symbol): String =
+      if (!sym.exists || (sym eq defn.LanguageModuleClass)) ""
+      else toPrefix(sym.owner) + sym.name + "."
+    def featureName = toPrefix(owner) + feature
+    def hasImport(implicit ctx: Context): Boolean = {
+      if (ctx.importInfo == null || (ctx.importInfo.site.widen.typeSymbol ne owner)) false
+      else if (ctx.importInfo.excluded.contains(feature)) false
+      else if (ctx.importInfo.originals.contains(feature)) true
+      else {
+        // Skip all contexts sharing the same import, then retry one import up.
+        var c = ctx.outer
+        while (c.importInfo eq ctx.importInfo) c = c.outer
+        hasImport(c)
+      }
+    }
+    // "_" in the -language setting enables every feature.
+    def hasOption = ctx.base.settings.language.value exists (s => s == featureName || s == "_")
+    // Imports are checked as of the typer phase.
+    hasImport(ctx.withPhase(ctx.typerPhase)) || hasOption
+  }
+
+  /** Is auto-tupling enabled? (i.e. `noAutoTupling` is NOT enabled) */
+  def canAutoTuple =
+    !featureEnabled(defn.LanguageModuleClass, nme.noAutoTupling)
+
+  /** Is Scala-2 compatibility mode enabled (`-language:Scala2` or import)? */
+  def scala2Mode =
+    featureEnabled(defn.LanguageModuleClass, nme.Scala2)
+
+  /** Is the `dynamics` language feature enabled? */
+  def dynamicsEnabled =
+    featureEnabled(defn.LanguageModuleClass, nme.dynamics)
+
+  /** If in Scala-2 mode, emit a migration warning `msg` at `pos`.
+   *  Returns whether Scala-2 mode is active, so callers can branch on it.
+   */
+  def testScala2Mode(msg: String, pos: Position) = {
+    if (scala2Mode) migrationWarning(msg, pos)
+    scala2Mode
+  }
+}
+
+object TypeOps {
+  // !!!DEBUG: global mutable switch, for debugging only.
+  // @sharable: deliberately shared mutable state, exempt from the sharing check.
+  @sharable var track = false
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala b/compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala
new file mode 100644
index 000000000..647c895db
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala
@@ -0,0 +1,40 @@
+package dotty.tools.dotc.core
+
+import Names.TypeName
+import Contexts.Context
+import Types.{Type, TypeBounds}
+
+/** A common super trait of Symbol and LambdaParam.
+ *  Used to capture the attributes of type parameters which can be implemented as either.
+ */
+trait TypeParamInfo {
+
+  /** Is this the info of a type parameter? Will return `false` for symbols
+   *  that are not type parameters.
+   */
+  def isTypeParam(implicit ctx: Context): Boolean
+
+  /** The name of the type parameter */
+  def paramName(implicit ctx: Context): TypeName
+
+  /** The info of the type parameter */
+  def paramBounds(implicit ctx: Context): TypeBounds
+
+  /** The info of the type parameter as seen from a prefix type.
+   *  For type parameter symbols, this is the `memberInfo` as seen from `prefix`.
+   *  For type lambda parameters, it's the same as `paramBounds` as
+   *  `asSeenFrom` has already been applied to the whole type lambda.
+   */
+  def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds
+
+  /** The parameter bounds, or the completer if the type parameter
+   *  is an as-yet uncompleted symbol.
+   */
+  def paramBoundsOrCompleter(implicit ctx: Context): Type
+
+  /** The variance of the type parameter, encoded as an Int
+   *  (presumably +1 covariant / 0 invariant / -1 contravariant — confirm
+   *  against the Variances conventions used by implementors).
+   */
+  def paramVariance(implicit ctx: Context): Int
+
+  /** A type that refers to the parameter */
+  def paramRef(implicit ctx: Context): Type
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala
new file mode 100644
index 000000000..5c476c1cb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala
@@ -0,0 +1,210 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._
+import Flags._
+import Contexts._
+import util.{SimpleMap, DotClass}
+import reporting._
+import printing.{Showable, Printer}
+import printing.Texts._
+import config.Config
+import collection.mutable
+
+/** The base (immutable) typer state. It carries no information of its own:
+ *  the constraint is always empty, the ephemeral flag is never set, and
+ *  commit/gc are no-ops or unsupported. MutableTyperState overrides the
+ *  interesting operations.
+ */
+class TyperState(r: Reporter) extends DotClass with Showable {
+
+  /** The current reporter */
+  def reporter = r
+
+  /** The current constraint set.
+   *  Note: this base implementation returns a fresh empty OrderingConstraint
+   *  on every call, and assignment is a no-op.
+   */
+  def constraint: Constraint =
+    new OrderingConstraint(SimpleMap.Empty, SimpleMap.Empty, SimpleMap.Empty)
+  def constraint_=(c: Constraint)(implicit ctx: Context): Unit = {}
+
+  /** The uninstantiated variables */
+  def uninstVars = constraint.uninstVars
+
+  /** The ephemeral flag is set as a side effect if an operation accesses
+   *  the underlying type of a type variable. The reason we need this flag is
+   *  that any such operation is not referentially transparent; it might logically change
+   *  its value at the moment the type variable is instantiated. Caching code needs to
+   *  check the ephemeral flag; If the flag is set during an operation, the result
+   *  of that operation should not be cached.
+   */
+  def ephemeral: Boolean = false
+  def ephemeral_=(x: Boolean): Unit = ()
+
+  /** Gives for each instantiated type var that does not yet have its `inst` field
+   *  set, the instance value stored in the constraint. Storing instances in constraints
+   *  is done only in a temporary way for contexts that may be retracted
+   *  without also retracting the type var as a whole.
+   */
+  def instType(tvar: TypeVar)(implicit ctx: Context): Type = constraint.entry(tvar.origin) match {
+    case _: TypeBounds => NoType
+    case tp: PolyParam =>
+      // `val` (was `var`): the binding is never reassigned.
+      val tvar1 = constraint.typeVarOfParam(tp)
+      if (tvar1.exists) tvar1 else tp
+    case tp => tp
+  }
+
+  /** A fresh typer state with the same constraint as this one.
+   *  @param isCommittable The constraint can be committed to an enclosing context.
+   */
+  def fresh(isCommittable: Boolean): TyperState = this
+
+  /** A fresh type state with the same constraint as this one and the given reporter */
+  def withReporter(reporter: Reporter) = new TyperState(reporter)
+
+  /** Commit state so that it gets propagated to enclosing context */
+  def commit()(implicit ctx: Context): Unit = unsupported("commit")
+
+  /** The closest ancestor of this typer state (including possibly this typer state itself)
+   *  which is not yet committed, or which does not have a parent.
+   */
+  def uncommittedAncestor: TyperState = this
+
+  /** Make type variable instances permanent by assigning to `inst` field if
+   *  type variable instantiation cannot be retracted anymore. Then, remove
+   *  no-longer needed constraint entries.
+   */
+  def gc()(implicit ctx: Context): Unit = ()
+
+  /** Is it allowed to commit this state? */
+  def isCommittable: Boolean = false
+
+  /** Can this state be transitively committed until the top-level? */
+  def isGlobalCommittable: Boolean = false
+
+  def tryWithFallback[T](op: => T)(fallback: => T)(implicit ctx: Context): T = unsupported("tryWithFallBack")
+
+  override def toText(printer: Printer): Text = "ImmutableTyperState"
+}
+
+/** A mutable typer state layered on top of `previous`; it can accumulate a
+ *  constraint, be committed into the enclosing state, or be discarded.
+ */
+class MutableTyperState(previous: TyperState, r: Reporter, override val isCommittable: Boolean)
+extends TyperState(r) {
+
+  // Mutable so tryWithFallback can temporarily swap in a StoreReporter.
+  private var myReporter = r
+
+  override def reporter = myReporter
+
+  // Snapshot of the parent's constraint at creation time; used by commit()
+  // to decide between plain copy and merge.
+  private val previousConstraint = previous.constraint
+  private var myConstraint: Constraint = previousConstraint
+
+  override def constraint = myConstraint
+  override def constraint_=(c: Constraint)(implicit ctx: Context) = {
+    if (Config.debugCheckConstraintsClosed && isGlobalCommittable) c.checkClosed()
+    myConstraint = c
+  }
+
+  // Inherit the ephemeral flag from the parent state.
+  private var myEphemeral: Boolean = previous.ephemeral
+
+  override def ephemeral = myEphemeral
+  override def ephemeral_=(x: Boolean): Unit = { myEphemeral = x }
+
+  override def fresh(isCommittable: Boolean): TyperState =
+    new MutableTyperState(this, new StoreReporter(reporter), isCommittable)
+
+  override def withReporter(reporter: Reporter) =
+    new MutableTyperState(this, reporter, isCommittable)
+
+  // Globally committable only if every enclosing mutable state is committable too.
+  override val isGlobalCommittable =
+    isCommittable &&
+    (!previous.isInstanceOf[MutableTyperState] || previous.isGlobalCommittable)
+
+  private var isCommitted = false
+
+  override def uncommittedAncestor: TyperState =
+    if (isCommitted) previous.uncommittedAncestor else this
+
+  /** Commit typer state so that its information is copied into current typer state
+   *  In addition (1) the owning state of undetermined or temporarily instantiated
+   *  type variables changes from this typer state to the current one. (2) Variables
+   *  that were temporarily instantiated in the current typer state are permanently
+   *  instantiated instead.
+   *
+   *  A note on merging: An interesting test case is isApplicableSafe.scala. It turns out that this
+   *  requires a context merge using the new `&' operator. Sequence of actions:
+   *  1) Typecheck argument in typerstate 1.
+   *  2) Cache argument.
+   *  3) Evolve same typer state (to typecheck other arguments, say)
+   *     leading to a different constraint.
+   *  4) Take typechecked argument in same state.
+   *
+   *  It turns out that the merge is needed not just for
+   *  isApplicableSafe but also for (e.g. erased-lubs.scala) as well as
+   *  many parts of dotty itself.
+   */
+  override def commit()(implicit ctx: Context) = {
+    val targetState = ctx.typerState
+    assert(isCommittable)
+    targetState.constraint =
+      // Fast path: target unchanged since this state was created — just copy.
+      if (targetState.constraint eq previousConstraint) constraint
+      else targetState.constraint & constraint
+    constraint foreachTypeVar { tvar =>
+      if (tvar.owningState eq this)
+        tvar.owningState = targetState
+    }
+    targetState.ephemeral |= ephemeral
+    targetState.gc()
+    reporter.flush()
+    isCommitted = true
+  }
+
+  override def gc()(implicit ctx: Context): Unit = {
+    // Poly types whose constraint entries have become removable.
+    val toCollect = new mutable.ListBuffer[PolyType]
+    constraint foreachTypeVar { tvar =>
+      if (!tvar.inst.exists) {
+        val inst = instType(tvar)
+        if (inst.exists && (tvar.owningState eq this)) {
+          // Make the instantiation permanent on the type variable itself.
+          tvar.inst = inst
+          val poly = tvar.origin.binder
+          if (constraint.isRemovable(poly)) toCollect += poly
+        }
+      }
+    }
+    for (poly <- toCollect)
+      constraint = constraint.remove(poly)
+  }
+
+  /** Try operation `op`; if it produces errors, execute `fallback` with constraint and
+   *  reporter as they were before `op` was executed. This is similar to `typer/tryEither`,
+   *  but with one important difference: Any type variable instantiations produced by `op`
+   *  are persisted even if `op` fails. This is normally not what one wants and therefore
+   *  it is recommended to use
+   *
+   *      tryEither { implicit ctx => op } { (_, _) => fallBack }
+   *
+   *  instead of
+   *
+   *      ctx.tryWithFallback(op)(fallBack)
+   *
+   *  `tryWithFallback` is only used when an implicit parameter search fails
+   *  and the whole expression is subsequently retype-checked with a Wildcard
+   *  expected type (so as to allow an implicit conversion on the result and
+   *  avoid over-constraining the implicit parameter search). In this case,
+   *  the only type variables that might be falsely instantiated by `op` but
+   *  not by `fallBack` are type variables in the typed expression itself, and
+   *  these will be thrown away and new ones will be created on re-typing.
+   *  So `tryWithFallback` is safe. It is also necessary because without it
+   *  we do not propagate enough instantiation information into the implicit search
+   *  and this might lead to a missing parameter type error. This is exhibited
+   *  at several places in the test suite (for instance in `pos_typers`).
+   *  Overall, this is rather ugly, but despite trying for 2 days I have not
+   *  found a better solution.
+   */
+  override def tryWithFallback[T](op: => T)(fallback: => T)(implicit ctx: Context): T = {
+    val storeReporter = new StoreReporter(myReporter)
+    val savedReporter = myReporter
+    myReporter = storeReporter
+    val savedConstraint = myConstraint
+    // The reporter is always restored; the constraint only on the error path.
+    val result = try op finally myReporter = savedReporter
+    if (!storeReporter.hasErrors) result
+    else {
+      myConstraint = savedConstraint
+      fallback
+    }
+  }
+
+  override def toText(printer: Printer): Text = constraint.toText(printer)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Types.overflow b/compiler/src/dotty/tools/dotc/core/Types.overflow
new file mode 100644
index 000000000..77f1f6fc1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Types.overflow
@@ -0,0 +1,66 @@
+// NOTE(review): contents of Types.overflow — parked/scratch code kept outside
+// compilation (non-.scala extension). It does not compile as-is: several doc
+// comments are unbalanced and bodies reference members not declared here.
+object Types {
+  class Type {
+
+    /** The non-private symbol with given name in the given class that matches this type.
+     *  @param inClass   The class containing the symbol's definition
+     *  @param name      The name of the symbol we are looking for
+     *  @param site      The base type from which member types are computed
+    def matchingTermSymbol(inClass: Symbol, name: Name, site: Type)(implicit ctx: Context): Symbol = {
+      var denot = inClass.info.nonPrivateDecl(name)
+      if (denot.isTerm) { // types of the same name always match
+        if (denot.isOverloaded)
+          denot = denot.atSignature(this.signature) // seems we need two kinds of signatures here
+        if (!(site.memberInfo(denot.symbol) matches this))
+          denot = NoDenotation
+      }
+      denot.symbol
+    }
+
+    final def firstParamTypes: List[Type] = this match {
+      case mt: MethodType => mt.paramTypes
+      case pt: PolyType => pt.firstParamTypes
+      case _ => Nil
+    }
+
+    /** `tp` is either a type variable or poly param. Returns
+     *  Covariant if all occurrences of `tp` in this type are covariant
+     *  Contravariant if all occurrences of `tp` in this type are contravariant
+     *  Covariant | Contravariant if there are no occurrences of `tp` in this type
+     *  EmptyFlags if `tp` occurs non-variantly in this type
+     */
+    def varianceOf(tp: Type): FlagSet = ???
+
+
+  }
+
+  class AndType extends Type {
+
+    def derived_& (tp1: Type, tp2: Type)(implicit ctx: Context) =
+      if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+      else tp1 & tp2
+
+  }
+
+  class OrType extends Type {
+
+    def derived_| (tp1: Type, tp2: Type)(implicit ctx: Context) =
+      if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+      else tp1 | tp2
+
+  }
+
+  class MethodType {
+    /* probably won't be needed
+    private var _isVarArgs: Boolean = _
+    private var knownVarArgs: Boolean = false
+
+    def isVarArgs(implicit ctx: Context) = {
+      if (!knownVarArgs) {
+        _isVarArgs = paramTypes.nonEmpty && paramTypes.last.isRepeatedParam
+        knownVarArgs = true
+      }
+      _isVarArgs
+    }
+    */
+  }
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala
new file mode 100644
index 000000000..89bc21929
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Types.scala
@@ -0,0 +1,3865 @@
+package dotty.tools
+package dotc
+package core
+
+import util.common._
+import Symbols._
+import Flags._
+import Names._
+import StdNames._, NameOps._
+import Scopes._
+import Constants._
+import Contexts._
+import Annotations._
+import SymDenotations._
+import Decorators._
+import Denotations._
+import Periods._
+import util.Positions.Position
+import util.Stats._
+import util.{DotClass, SimpleMap}
+import ast.tpd._
+import ast.TreeTypeMap
+import printing.Texts._
+import ast.untpd
+import dotty.tools.dotc.transform.Erasure
+import printing.Printer
+import Hashable._
+import Uniques._
+import collection.{mutable, Seq, breakOut}
+import config.Config
+import annotation.tailrec
+import Flags.FlagSet
+import language.implicitConversions
+import scala.util.hashing.{ MurmurHash3 => hashing }
+import config.Printers.{core, typr, cyclicErrors}
+
+object Types {
+
+  // Counter backing `Type.uniqId` (debug-only identifiers).
+  // @sharable: mutable global state deliberately exempted from the sharing check.
+  @sharable private var nextId = 0
+
+  // Implicit witness permitting Type-to-Type equality comparisons (Eq check).
+  implicit def eqType: Eq[Type, Type] = Eq
+
+ /** The class of types.
+ * The principal subclasses and sub-objects are as follows:
+ *
+ * Type -+- ProxyType --+- NamedType ----+--- TypeRef
+ * | | \
+ * | +- SingletonType-+-+- TermRef
+ * | | |
+ * | | +--- ThisType
+ * | | +--- SuperType
+ * | | +--- ConstantType
+ * | | +--- MethodParam
+ * | | +----RecThis
+ * | | +--- SkolemType
+ * | +- PolyParam
+ * | +- RefinedOrRecType -+-- RefinedType
+ * | | -+-- RecType
+ * | +- HKApply
+ * | +- TypeBounds
+ * | +- ExprType
+ * | +- AnnotatedType
+ * | +- TypeVar
+ * | +- PolyType
+ * |
+ * +- GroundType -+- AndType
+ * +- OrType
+ * +- MethodType -----+- ImplicitMethodType
+ * | +- JavaMethodType
+ * +- ClassInfo
+ * |
+ * +- NoType
+ * +- NoPrefix
+ * +- ErrorType
+ * +- WildcardType
+ *
+ * Note: please keep in sync with copy in `docs/docs/internals/type-system.md`.
+ */
+ abstract class Type extends DotClass with Hashable with printing.Showable {
+
+// ----- Tests -----------------------------------------------------
+
+    // debug only: a unique identifier for a type.
+    // NOTE(review): `nextId` is incremented without synchronization, so ids
+    // are only guaranteed unique in single-threaded use.
+    val uniqId = {
+      nextId = nextId + 1
+//      if (nextId == 19555)
+//        println("foo")
+      nextId
+    }
+
+    /** Is this type different from NoType? */
+    def exists: Boolean = true
+
+    /** This type, if it exists, otherwise `that` type */
+    def orElse(that: => Type) = if (exists) this else that
+
+    /** Is this type a value type? */
+    final def isValueType: Boolean = this.isInstanceOf[ValueType]
+
+    /** Is this a value type or a type lambda? */
+    final def isValueTypeOrLambda: Boolean = isValueType || this.isInstanceOf[PolyType]
+
+    /** Does this type denote a stable reference (i.e. singleton type)? */
+    final def isStable(implicit ctx: Context): Boolean = stripTypeVar match {
+      case tp: TermRef => tp.termSymbol.isStable && tp.prefix.isStable
+      case _: SingletonType | NoPrefix => true
+      case tp: RefinedOrRecType => tp.parent.isStable
+      case _ => false
+    }
+
+    /** Is this type a (possibly refined or applied or aliased) type reference
+     *  to the given type symbol?
+     *  @param sym  The symbol to compare to. It must be a class symbol or abstract type.
+     *              It makes no sense for it to be an alias type because isRef would always
+     *              return false in that case.
+     */
+    def isRef(sym: Symbol)(implicit ctx: Context): Boolean = stripAnnots.stripTypeVar match {
+      case this1: TypeRef =>
+        this1.info match { // see comment in Namer#typeDefSig
+          case TypeAlias(tp) => tp.isRef(sym)
+          case _ => this1.symbol eq sym
+        }
+      case this1: RefinedOrRecType => this1.parent.isRef(sym)
+      case this1: HKApply => this1.superType.isRef(sym)
+      case _ => false
+    }
+
+    /** Is this type a (neither aliased nor applied) reference to class `sym`? */
+    def isDirectRef(sym: Symbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+      case this1: TypeRef =>
+        this1.name == sym.name && // avoid forcing info if names differ
+        (this1.symbol eq sym)
+      case _ =>
+        false
+    }
+
+    /** Does this type refer exactly to class symbol `sym`, instead of to a subclass of `sym`?
+     *  Implemented like `isRef`, but follows more types: all type proxies as well as and- and or-types
+     */
+    private[Types] def isTightPrefix(sym: Symbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+      case tp: NamedType => tp.info.isTightPrefix(sym)
+      case tp: ClassInfo => tp.cls eq sym
+      case tp: Types.ThisType => tp.cls eq sym
+      case tp: TypeProxy => tp.underlying.isTightPrefix(sym)
+      // For an intersection, both sides must be tight; for a union, either one.
+      case tp: AndType => tp.tp1.isTightPrefix(sym) && tp.tp2.isTightPrefix(sym)
+      case tp: OrType => tp.tp1.isTightPrefix(sym) || tp.tp2.isTightPrefix(sym)
+      case _ => false
+    }
+
+    /** Is this type an instance of a non-bottom subclass of the given class `cls`? */
+    final def derivesFrom(cls: Symbol)(implicit ctx: Context): Boolean = {
+      def loop(tp: Type) = tp match {
+        case tp: TypeRef =>
+          val sym = tp.symbol
+          if (sym.isClass) sym.derivesFrom(cls) else tp.superType.derivesFrom(cls)
+        case tp: TypeProxy =>
+          tp.underlying.derivesFrom(cls)
+        // An intersection derives from `cls` if either part does; a union
+        // only if both parts do.
+        case tp: AndType =>
+          tp.tp1.derivesFrom(cls) || tp.tp2.derivesFrom(cls)
+        case tp: OrType =>
+          tp.tp1.derivesFrom(cls) && tp.tp2.derivesFrom(cls)
+        case tp: JavaArrayType =>
+          cls == defn.ObjectClass
+        case _ =>
+          false
+      }
+      // Everything derives from Any; avoid the traversal in that case.
+      cls == defn.AnyClass || loop(this)
+    }
+
+    /** Is this type guaranteed not to have `null` as a value?
+     *  For the moment this is only true for modules, but it could
+     *  be refined later.
+     */
+    final def isNotNull(implicit ctx: Context): Boolean =
+      classSymbol is ModuleClass
+
+    /** Is this type produced as a repair for an error? */
+    final def isError(implicit ctx: Context): Boolean = stripTypeVar match {
+      case ErrorType => true
+      case tp => (tp.typeSymbol is Erroneous) || (tp.termSymbol is Erroneous)
+    }
+
+    /** Is some part of this type produced as a repair for an error?
+     *  Lazy types are not forced during the traversal (forceLazy = false).
+     */
+    final def isErroneous(implicit ctx: Context): Boolean = existsPart(_.isError, forceLazy = false)
+
+    /** Does the type carry an annotation that is an instance of `cls`? */
+    final def hasAnnotation(cls: ClassSymbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+      case AnnotatedType(tp, annot) => (annot matches cls) || (tp hasAnnotation cls)
+      case _ => false
+    }
+
+    /** Does this type occur as a part of type `that`? */
+    final def occursIn(that: Type)(implicit ctx: Context): Boolean =
+      that existsPart (this == _)
+
+    /** Is this a type of a repeated parameter? */
+    def isRepeatedParam(implicit ctx: Context): Boolean =
+      typeSymbol eq defn.RepeatedParamClass
+
+    /** Does this type carry an UnsafeNonvariant annotation?
+     *  Note: checks only the outermost annotation (no recursion).
+     */
+    final def isUnsafeNonvariant(implicit ctx: Context): Boolean = this match {
+      case AnnotatedType(_, annot) => annot.symbol == defn.UnsafeNonvariantAnnot
+      case _ => false
+    }
+
+    /** Does this type have an UnsafeNonvariant annotation on one of its parts? */
+    final def hasUnsafeNonvariant(implicit ctx: Context): Boolean =
+      new HasUnsafeNonAccumulator().apply(false, this)
+
+    /** Is this the type of a method that has a repeated parameter type as
+     *  last parameter type?
+     */
+    def isVarArgsMethod(implicit ctx: Context): Boolean = this match {
+      case tp: PolyType => tp.resultType.isVarArgsMethod // look through type params
+      case MethodType(_, paramTypes) => paramTypes.nonEmpty && paramTypes.last.isRepeatedParam
+      case _ => false
+    }
+
+    /** Is this an alias TypeBounds? */
+    def isAlias: Boolean = this.isInstanceOf[TypeAlias]
+
// ----- Higher-order combinators -----------------------------------

  /** Returns true if there is a part of this type that satisfies predicate `p`.
   *  @param forceLazy  if false, LazyRefs are not dereferenced while traversing.
   */
  final def existsPart(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context): Boolean =
    new ExistsAccumulator(p, forceLazy).apply(false, this)

  /** Returns true if all parts of this type satisfy predicate `p`.
   *  Implemented as the dual of `existsPart`.
   */
  final def forallParts(p: Type => Boolean)(implicit ctx: Context): Boolean =
    !existsPart(!p(_))

  /** Performs operation on all parts of this type */
  final def foreachPart(p: Type => Unit, stopAtStatic: Boolean = false)(implicit ctx: Context): Unit =
    new ForeachAccumulator(p, stopAtStatic).apply((), this)

  /** The parts of this type which are type or term refs */
  final def namedParts(implicit ctx: Context): collection.Set[NamedType] =
    namedPartsWith(alwaysTrue)

  /** The parts of this type which are type or term refs and which
   *  satisfy predicate `p`.
   *
   *  @param p                  The predicate to satisfy
   *  @param excludeLowerBounds If set to true, the lower bounds of abstract
   *                            types will be ignored.
   *  Uses a LinkedHashSet so iteration order is deterministic (insertion order).
   */
  def namedPartsWith(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
    (implicit ctx: Context): collection.Set[NamedType] =
    new NamedPartsAccumulator(p, excludeLowerBounds).apply(mutable.LinkedHashSet(), this)

  /** Map function `f` over elements of an AndType, rebuilding with function `g` */
  def mapReduceAnd[T](f: Type => T)(g: (T, T) => T)(implicit ctx: Context): T = stripTypeVar match {
    case AndType(tp1, tp2) => g(tp1.mapReduceAnd(f)(g), tp2.mapReduceAnd(f)(g))
    case _ => f(this)
  }

  /** Map function `f` over elements of an OrType, rebuilding with function `g` */
  final def mapReduceOr[T](f: Type => T)(g: (T, T) => T)(implicit ctx: Context): T = stripTypeVar match {
    case OrType(tp1, tp2) => g(tp1.mapReduceOr(f)(g), tp2.mapReduceOr(f)(g))
    case _ => f(this)
  }
+
// ----- Associated symbols ----------------------------------------------

  /** The type symbol associated with the type */
  final def typeSymbol(implicit ctx: Context): Symbol = this match {
    case tp: TypeRef => tp.symbol
    case tp: ClassInfo => tp.cls
//  case ThisType(cls) => cls // needed?
    case tp: SingletonType => NoSymbol // singletons have term symbols, not type symbols
    case tp: TypeProxy => tp.underlying.typeSymbol
    case _ => NoSymbol
  }

  /** The least class or trait of which this type is a subtype or parameterized
   *  instance, or NoSymbol if none exists (either because this type is not a
   *  value type, or because superclasses are ambiguous).
   */
  final def classSymbol(implicit ctx: Context): Symbol = this match {
    case ConstantType(constant) =>
      constant.tpe.classSymbol
    case tp: TypeRef =>
      val sym = tp.symbol
      if (sym.isClass) sym else tp.superType.classSymbol
    case tp: ClassInfo =>
      tp.cls
    case tp: SingletonType =>
      NoSymbol
    case tp: TypeProxy =>
      tp.underlying.classSymbol
    case AndType(l, r) =>
      // For an intersection, take the more specific operand class if comparable.
      val lsym = l.classSymbol
      val rsym = r.classSymbol
      if (lsym isSubClass rsym) lsym
      else if (rsym isSubClass lsym) rsym
      else NoSymbol // ambiguous superclasses
    case OrType(l, r) => // TODO does not conform to spec
      // For a union, take the less specific operand class if comparable.
      val lsym = l.classSymbol
      val rsym = r.classSymbol
      if (lsym isSubClass rsym) rsym
      else if (rsym isSubClass lsym) lsym
      else NoSymbol
    case _ =>
      NoSymbol
  }

  /** The least (wrt <:<) set of class symbols of which this type is a subtype
   */
  final def classSymbols(implicit ctx: Context): List[ClassSymbol] = this match {
    case tp: ClassInfo =>
      tp.cls :: Nil
    case tp: TypeRef =>
      val sym = tp.symbol
      if (sym.isClass) sym.asClass :: Nil else tp.superType.classSymbols
    case tp: TypeProxy =>
      tp.underlying.classSymbols
    case AndType(l, r) =>
      l.classSymbols union r.classSymbols // A & B is a subtype of both operands' classes
    case OrType(l, r) =>
      l.classSymbols intersect r.classSymbols // TODO does not conform to spec
    case _ =>
      Nil
  }

  /** The term symbol associated with the type */
  final def termSymbol(implicit ctx: Context): Symbol = this match {
    case tp: TermRef => tp.symbol
    case tp: TypeProxy => tp.underlying.termSymbol
    case _ => NoSymbol
  }

  /** The base classes of this type as determined by ClassDenotation
   *  in linearization order, with the class itself as first element.
   *  For AndTypes/OrTypes, the union/intersection of the operands' baseclasses.
   *  Inherited by all type proxies. `Nil` for all other types.
   */
  final def baseClasses(implicit ctx: Context): List[ClassSymbol] = track("baseClasses") {
    this match {
      case tp: TypeProxy =>
        tp.underlying.baseClasses
      case tp: ClassInfo =>
        tp.cls.baseClasses
      case AndType(tp1, tp2) =>
        tp1.baseClasses union tp2.baseClasses
      case OrType(tp1, tp2) =>
        tp1.baseClasses intersect tp2.baseClasses
      case _ => Nil
    }
  }
+
// ----- Member access -------------------------------------------------

  /** The scope of all declarations of this type.
   *  Defined by ClassInfo, inherited by type proxies.
   *  Empty scope for all other types.
   */
  final def decls(implicit ctx: Context): Scope = this match {
    case tp: ClassInfo =>
      tp.decls
    case tp: TypeProxy =>
      tp.underlying.decls
    case _ =>
      EmptyScope
  }

  /** A denotation containing the declaration(s) in this type with the given name.
   *  The result is either a SymDenotation or a MultiDenotation of SymDenotations.
   *  The info(s) are the original symbol infos, no translation takes place.
   */
  final def decl(name: Name)(implicit ctx: Context): Denotation = track("decl") {
    findDecl(name, EmptyFlags)
  }

  /** A denotation containing the non-private declaration(s) in this type with the given name */
  final def nonPrivateDecl(name: Name)(implicit ctx: Context): Denotation = track("nonPrivateDecl") {
    findDecl(name, Private)
  }

  /** A denotation containing the declaration(s) in this type with the given
   *  name, as seen from prefix type `pre`. Declarations that have a flag
   *  in `excluded` are omitted.
   */
  final def findDecl(name: Name, excluded: FlagSet)(implicit ctx: Context): Denotation = this match {
    case tp: ClassInfo =>
      tp.decls.denotsNamed(name).filterExcluded(excluded).toDenot(NoPrefix)
    case tp: TypeProxy =>
      tp.underlying.findDecl(name, excluded)
    case ErrorType =>
      // Repair lookup on an error type with a synthetic error symbol.
      ctx.newErrorSymbol(classSymbol orElse defn.RootClass, name)
    case _ =>
      NoDenotation
  }

  /** The member of this type with the given name */
  final def member(name: Name)(implicit ctx: Context): Denotation = /*>|>*/ track("member") /*<|<*/ {
    memberExcluding(name, EmptyFlags)
  }

  /** The non-private member of this type with the given name. */
  final def nonPrivateMember(name: Name)(implicit ctx: Context): Denotation = track("nonPrivateMember") {
    memberExcluding(name, Flags.Private)
  }

  /** The member of this type with the given name, omitting members
   *  carrying any of the `excluding` flags.
   */
  final def memberExcluding(name: Name, excluding: FlagSet)(implicit ctx: Context): Denotation = {
    // We need a valid prefix for `asSeenFrom`
    val pre = this match {
      case tp: ClassInfo =>
        tp.typeRef
      case _ =>
        widenIfUnstable
    }
    findMember(name, pre, excluding)
  }
+
  /** Find member of this type with given name and
   *  produce a denotation that contains the type of the member
   *  as seen from given prefix `pre`. Exclude all members that have
   *  flags in `excluded` from consideration.
   */
  final def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation = {
    // Dispatch on the shape of `tp`; self-calls are in tail position.
    @tailrec def go(tp: Type): Denotation = tp match {
      case tp: RefinedType =>
        if (name eq tp.refinedName) goRefined(tp) else go(tp.parent)
      case tp: ThisType =>
        goThis(tp)
      case tp: TypeRef =>
        tp.denot.findMember(name, pre, excluded)
      case tp: TermRef =>
        // A stable parameterless method can be dereferenced to its result type.
        go (tp.underlying match {
          case mt: MethodType
            if mt.paramTypes.isEmpty && (tp.symbol is Stable) => mt.resultType
          case tp1 => tp1
        })
      case tp: PolyParam =>
        goParam(tp)
      case tp: RecType =>
        goRec(tp)
      case tp: HKApply =>
        goApply(tp)
      case tp: TypeProxy =>
        go(tp.underlying)
      case tp: ClassInfo =>
        tp.cls.findMember(name, pre, excluded)
      case AndType(l, r) =>
        goAnd(l, r)
      case tp: OrType =>
        // we need to keep the invariant that `pre <: tp`. Branch `union-types-narrow-prefix`
        // achieved that by narrowing `pre` to each alternative, but it led to merge errors in
        // lots of places. The present strategy is instead to widen `tp` using `join` to be a
        // supertype of `pre`.
        go(tp.join)
      case tp: JavaArrayType =>
        // Java arrays expose Object's members.
        defn.ObjectType.findMember(name, pre, excluded)
      case ErrorType =>
        ctx.newErrorSymbol(pre.classSymbol orElse defn.RootClass, name)
      case _ =>
        NoDenotation
    }
    def goRec(tp: RecType) =
      if (tp.parent == null) NoDenotation
      else {
        //println(s"find member $pre . $name in $tp")

        // We have to be careful because we might open the same (wrt eq) recursive type
        // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)`
        // call below. To avoid this problem we do a defensive copy of the recursive
        // type first. But if we do this always we risk being inefficient and we ran into
        // stackoverflows when compiling pos/hk.scala under the refinement encoding
        // of hk-types. So we only do a copy if the type
        // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`.
        // Furthermore, if this happens we mark the original recursive type with `openedTwice`
        // which means that we always defensively copy the type in the future. This second
        // measure is necessary because findMember calls might be cached, so do not
        // necessarily appear in nested order.
        // Without the defensive copy, Typer.scala fails to compile at the line
        //
        //    untpd.rename(lhsCore, setterName).withType(setterType), WildcardType)
        //
        // because the subtype check
        //
        //    ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed]
        //
        // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.)
        //
        // Without the `openedTwice` trick, Typer.scala fails to Ycheck
        // at phase resolveSuper.
        val rt =
          if (tp.opened) { // defensive copy
            tp.openedTwice = true
            RecType(rt => tp.parent.substRecThis(tp, RecThis(rt)))
          } else tp
        rt.opened = true
        try go(rt.parent).mapInfo(_.substRecThis(rt, pre))
        finally {
          if (!rt.openedTwice) rt.opened = false
        }
      }

    // Member search in a refinement: combine the parent's denotation with the refined info.
    def goRefined(tp: RefinedType) = {
      val pdenot = go(tp.parent)
      val rinfo = tp.refinedInfo
      if (name.isTypeName) { // simplified case that runs more efficiently
        val jointInfo =
          if (rinfo.isAlias) rinfo
          else if (pdenot.info.isAlias) pdenot.info
          else if (ctx.pendingMemberSearches.contains(name)) pdenot.info safe_& rinfo
          else
            try pdenot.info & rinfo
            catch {
              case ex: CyclicReference =>
                // happens for tests/pos/sets.scala. findMember is called from baseTypeRef.
                // The & causes a subtype check which calls baseTypeRef again with the same
                // superclass. In the observed case, the superclass was Any, and
                // the special shortcut for Any in derivesFrom was as yet absent. To reproduce,
                // remove the special treatment of Any in derivesFrom and compile
                // sets.scala.
                pdenot.info safe_& rinfo
            }
        pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo)
      } else {
        pdenot & (
          new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId)),
          pre,
          safeIntersection = ctx.pendingMemberSearches.contains(name))
      }
    }

    def goApply(tp: HKApply) = tp.tycon match {
      case tl: PolyType =>
        // Search the lambda's result, then re-abstract and re-apply the arguments.
        go(tl.resType).mapInfo(info =>
          tl.derivedLambdaAbstraction(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args))
      case _ =>
        go(tp.superType)
    }

    def goThis(tp: ThisType) = {
      val d = go(tp.underlying)
      if (d.exists)
        if ((pre eq tp) && d.symbol.is(NamedTypeParam) && (d.symbol.owner eq tp.cls))
          // If we look for a named type parameter `P` in `C.this.P`, looking up
          // the fully applied self type of `C` will give as an info the alias type
          // `P = this.P`. We need to return a denotation with the underlying bounds instead.
          d.symbol.denot
        else d
      else
        // There is a special case to handle:
        //   trait Super { this: Sub => private class Inner {} println(this.Inner) }
        //   class Sub extends Super
        // When resolving Super.this.Inner, the normal logic goes to the self type and
        // looks for Inner from there. But this fails because Inner is private.
        // We fix the problem by having the following fallback case, which links up the
        // member in Super instead of Sub.
        // As an example of this in the wild, see
        // loadClassWithPrivateInnerAndSubSelf in ShowClassTests
        go(tp.cls.typeRef) orElse d
    }
    def goParam(tp: PolyParam) = {
      val next = tp.underlying
      ctx.typerState.constraint.entry(tp) match {
        case bounds: TypeBounds if bounds ne next =>
          // The parameter is constrained: the result depends on the current typer
          // state, so mark it ephemeral and search from the constrained upper bound.
          ctx.typerState.ephemeral = true
          go(bounds.hi)
        case _ =>
          go(next)
      }
    }
    def goAnd(l: Type, r: Type) = {
      go(l) & (go(r), pre, safeIntersection = ctx.pendingMemberSearches.contains(name))
    }

    // Track recursion depth; past the threshold, record `name` so that nested
    // searches for the same name switch to the safe (non-simplifying) intersection.
    { val recCount = ctx.findMemberCount + 1
      ctx.findMemberCount = recCount
      if (recCount >= Config.LogPendingFindMemberThreshold)
        ctx.pendingMemberSearches = name :: ctx.pendingMemberSearches
    }

    //assert(ctx.findMemberCount < 20)
    try go(this)
    catch {
      case ex: Throwable =>
        core.println(i"findMember exception for $this member $name, pre = $pre")
        throw ex // DEBUG
    }
    finally {
      // Undo the bookkeeping above, whether or not the search succeeded.
      val recCount = ctx.findMemberCount
      if (recCount >= Config.LogPendingFindMemberThreshold)
        ctx.pendingMemberSearches = ctx.pendingMemberSearches.tail
      ctx.findMemberCount = recCount - 1
    }
  }
+
  /** The set of names of members of this type that pass the given name filter
   *  when seen as members of `pre`. More precisely, these are all
   *  of members `name` such that `keepOnly(pre, name)` is `true`.
   *  @note OK to use a Set[Name] here because Name hashcodes are replayable,
   *        hence the Set will always give the same names in the same order.
   */
  final def memberNames(keepOnly: NameFilter, pre: Type = this)(implicit ctx: Context): Set[Name] = this match {
    case tp: ClassInfo =>
      tp.cls.memberNames(keepOnly) filter (keepOnly(pre, _))
    case tp: RefinedType =>
      val ns = tp.parent.memberNames(keepOnly, pre)
      if (keepOnly(pre, tp.refinedName)) ns + tp.refinedName else ns
    case tp: TypeProxy =>
      tp.underlying.memberNames(keepOnly, pre)
    case tp: AndType =>
      tp.tp1.memberNames(keepOnly, pre) | tp.tp2.memberNames(keepOnly, pre) // union of members
    case tp: OrType =>
      tp.tp1.memberNames(keepOnly, pre) & tp.tp2.memberNames(keepOnly, pre) // only common members
    case _ =>
      Set()
  }

  /** Collect single denotations for all member names passing `keepOnly`,
   *  by letting `f` append the denotations for each name to a shared buffer.
   */
  def memberDenots(keepOnly: NameFilter, f: (Name, mutable.Buffer[SingleDenotation]) => Unit)(implicit ctx: Context): Seq[SingleDenotation] = {
    val buf = mutable.ArrayBuffer[SingleDenotation]()
    for (name <- memberNames(keepOnly)) f(name, buf)
    buf
  }
+
  /** The set of abstract term members of this type. */
  final def abstractTermMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTermMembers") {
    memberDenots(abstractTermNameFilter,
      (name, buf) => buf ++= nonPrivateMember(name).altsWith(_ is Deferred))
  }

  /** The set of abstract type members of this type. */
  final def abstractTypeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTypeMembers") {
    memberDenots(abstractTypeNameFilter,
      (name, buf) => buf += nonPrivateMember(name).asSingleDenotation)
  }

  /** The set of type members of this type that are not classes. */
  final def nonClassTypeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("nonClassTypeMembers") {
    memberDenots(nonClassTypeNameFilter,
      (name, buf) => buf += member(name).asSingleDenotation)
  }

  /** The set of type members of this type */
  final def typeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("typeMembers") {
    memberDenots(typeNameFilter,
      (name, buf) => buf += member(name).asSingleDenotation)
  }

  /** The set of implicit members of this type, as term refs with signature. */
  final def implicitMembers(implicit ctx: Context): List[TermRef] = track("implicitMembers") {
    memberDenots(implicitFilter,
      (name, buf) => buf ++= member(name).altsWith(_ is Implicit))
      .toList.map(d => TermRef.withSig(this, d.symbol.asTerm))
  }
+
+ /** The set of member classes of this type */
+ final def memberClasses(implicit ctx: Context): Seq[SingleDenotation] = track("implicitMembers") {
+ memberDenots(typeNameFilter,
+ (name, buf) => buf ++= member(name).altsWith(x => x.isClass))
+ }
+
+ final def fields(implicit ctx: Context): Seq[SingleDenotation] = track("fields") {
+ memberDenots(fieldFilter,
+ (name, buf) => buf ++= member(name).altsWith(x => !x.is(Method)))
+ }
+
+ /** The set of members of this type having at least one of `requiredFlags` but none of `excludedFlags` set */
+ final def membersBasedOnFlags(requiredFlags: FlagSet, excludedFlags: FlagSet)(implicit ctx: Context): Seq[SingleDenotation] = track("implicitMembers") {
+ memberDenots(takeAllFilter,
+ (name, buf) => buf ++= memberExcluding(name, excludedFlags).altsWith(x => x.is(requiredFlags)))
+ }
+
  /** The info of `sym`, seen as a member of this type. */
  final def memberInfo(sym: Symbol)(implicit ctx: Context): Type =
    sym.info.asSeenFrom(this, sym.owner)

  /** This type seen as if it were the type of a member of prefix type `pre`
   *  declared in class `cls`.
   */
  final def asSeenFrom(pre: Type, cls: Symbol)(implicit ctx: Context): Type = track("asSeenFrom") {
    // Skip the substitution entirely when `cls`'s members cannot depend on the prefix.
    if (!cls.membersNeedAsSeenFrom(pre)) this
    else ctx.asSeenFrom(this, pre, cls)
  }
+
// ----- Subtype-related --------------------------------------------

  /** Is this type a subtype of that type? */
  final def <:<(that: Type)(implicit ctx: Context): Boolean = track("<:<") {
    ctx.typeComparer.topLevelSubType(this, that)
  }

  /** Is this type a subtype of that type when the constraint is frozen,
   *  i.e. without further constraining type variables?
   */
  final def frozen_<:<(that: Type)(implicit ctx: Context): Boolean = track("frozen_<:<") {
    ctx.typeComparer.isSubTypeWhenFrozen(this, that)
  }

  /** Is this type the same as that type?
   *  This is the case iff `this <:< that` and `that <:< this`.
   */
  final def =:=(that: Type)(implicit ctx: Context): Boolean = track("=:=") {
    ctx.typeComparer.isSameType(this, that)
  }

  /** Is this type a primitive value type which can be widened to the primitive value type `that`? */
  def isValueSubType(that: Type)(implicit ctx: Context) = widen match {
    case self: TypeRef if self.symbol.isPrimitiveValueClass =>
      that.widenExpr match {
        case that: TypeRef if that.symbol.isPrimitiveValueClass =>
          defn.isValueSubClass(self.symbol, that.symbol)
        case _ =>
          false
      }
    case _ =>
      false
  }

  /** `<:<` relaxed to also admit primitive value widenings (e.g. Int to Long). */
  def relaxed_<:<(that: Type)(implicit ctx: Context) =
    (this <:< that) || (this isValueSubType that)

  /** Is this type a legal type for a member that overrides another
   *  member of type `that`? This is the same as `<:<`, except that
   *  the types ()T and => T are identified, and T is seen as overriding
   *  either type.
   */
  final def overrides(that: Type)(implicit ctx: Context) = {
    // Strip a by-name or empty-parameter wrapper down to the result type.
    def result(tp: Type): Type = tp match {
      case ExprType(_) | MethodType(Nil, _) => tp.resultType
      case _ => tp
    }
    (this frozen_<:< that) || {
      val rthat = result(that)
      (rthat ne that) && (result(this) frozen_<:< rthat)
    }
  }

  /** Is this type close enough to that type so that members
   *  with the two types would override each other?
   *  This means:
   *    - Either both types are polytypes with the same number of
   *      type parameters and their result types match after renaming
   *      corresponding type parameters
   *    - Or both types are method types with =:=-equivalent(*) parameter types
   *      and matching result types after renaming corresponding parameter types
   *      if the method types are dependent.
   *    - Or both types are =:=-equivalent
   *    - Or phase.erasedTypes is false, and neither type takes
   *      term or type parameters.
   *
   *  (*) when matching with a Java method, we also regard Any and Object as equivalent
   *      parameter types.
   */
  def matches(that: Type)(implicit ctx: Context): Boolean = track("matches") {
    ctx.typeComparer.matchesType(this, that, relaxed = !ctx.phase.erasedTypes)
  }

  /** This is the same as `matches` except that it also matches => T with T and
   *  vice versa.
   */
  def matchesLoosely(that: Type)(implicit ctx: Context): Boolean =
    (this matches that) || {
      val thisResult = this.widenExpr
      val thatResult = that.widenExpr
      // Retry only if exactly one side was an ExprType, to avoid infinite recursion.
      (this eq thisResult) != (that eq thatResult) && (thisResult matchesLoosely thatResult)
    }

  /** The basetype TypeRef of this type with given class symbol,
   *  but without including any type arguments
   */
  final def baseTypeRef(base: Symbol)(implicit ctx: Context): Type = /*ctx.traceIndented(s"$this baseTypeRef $base")*/ /*>|>*/ track("baseTypeRef") /*<|<*/ {
    base.denot match {
      case classd: ClassDenotation => classd.baseTypeRefOf(this)
      case _ => NoType
    }
  }
+
  /** The greatest lower bound (intersection) of this type and `that`. */
  def & (that: Type)(implicit ctx: Context): Type = track("&") {
    ctx.typeComparer.glb(this, that)
  }

  /** Safer version of `&`.
   *
   *  This version does not simplify the upper bound of the intersection of
   *  two TypeBounds. The simplification done by `&` requires subtyping checks
   *  which may end up calling `&` again, in most cases this should be safe
   *  but because of F-bounded types, this can result in an infinite loop
   *  (which will be masked unless `-Yno-deep-subtypes` is enabled).
   */
  def safe_& (that: Type)(implicit ctx: Context): Type = (this, that) match {
    case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => TypeBounds(lo1 | lo2, AndType(hi1, hi2))
    case _ => this & that
  }

  /** The least upper bound (union) of this type and `that`. */
  def | (that: Type)(implicit ctx: Context): Type = track("|") {
    ctx.typeComparer.lub(this, that)
  }
+
// ----- Unwrapping types -----------------------------------------------

  /** Map a TypeVar to either its instance if it is instantiated, or its origin,
   *  if not, until the result is no longer a TypeVar. Identity on all other types.
   */
  def stripTypeVar(implicit ctx: Context): Type = this

  /** Remove all AnnotatedTypes wrapping this type.
   */
  def stripAnnots(implicit ctx: Context): Type = this

  /** Widen from singleton type to its underlying non-singleton
   *  base type by applying one or more `underlying` dereferences,
   *  Also go from => T to T.
   *  Identity for all other types. Example:
   *
   *  class Outer { class C ; val x: C }
   *  def o: Outer
   *  <o.x.type>.widen = o.C
   */
  final def widen(implicit ctx: Context): Type = widenSingleton match {
    case tp: ExprType => tp.resultType.widen
    case tp => tp
  }

  /** Widen from singleton type to its underlying non-singleton
   *  base type by applying one or more `underlying` dereferences.
   */
  final def widenSingleton(implicit ctx: Context): Type = stripTypeVar match {
    case tp: SingletonType if !tp.isOverloaded => tp.underlying.widenSingleton
    case _ => this
  }

  /** Widen from TermRef to its underlying non-termref
   *  base type, while also skipping Expr types.
   */
  final def widenTermRefExpr(implicit ctx: Context): Type = stripTypeVar match {
    case tp: TermRef if !tp.isOverloaded => tp.underlying.widenExpr.widenTermRefExpr
    case _ => this
  }

  /** Widen from ExprType type to its result type.
   *  (Note: no stripTypeVar needed because TypeVar's can't refer to ExprTypes.)
   */
  final def widenExpr: Type = this match {
    case tp: ExprType => tp.resType
    case _ => this
  }

  /** Widen type if it is unstable (i.e. an ExprType, or TermRef to unstable symbol */
  final def widenIfUnstable(implicit ctx: Context): Type = stripTypeVar match {
    case tp: ExprType => tp.resultType.widenIfUnstable
    case tp: TermRef if !tp.symbol.isStable => tp.underlying.widenIfUnstable
    case _ => this
  }

  /** If this is a skolem, its underlying type, otherwise the type itself */
  final def widenSkolem(implicit ctx: Context): Type = this match {
    case tp: SkolemType => tp.underlying
    case _ => this
  }

  /** Eliminate anonymous classes, replacing a reference to an anonymous class
   *  with a reference to the class itself seen from the original prefix.
   */
  final def deAnonymize(implicit ctx: Context): Type = this match {
    case tp:TypeRef if tp.symbol.isAnonymousClass =>
      tp.symbol.asClass.typeRef.asSeenFrom(tp.prefix, tp.symbol.owner)
    case tp => tp
  }
+
  /** Shared worker for `dealias` / `dealiasKeepAnnots`.
   *  @param keepAnnots if true, annotations are re-wrapped around the dealiased result.
   */
  private def dealias(keepAnnots: Boolean)(implicit ctx: Context): Type = this match {
    case tp: TypeRef =>
      if (tp.symbol.isClass) tp // class references are never aliases
      else tp.info match {
        case TypeAlias(tp) => tp.dealias(keepAnnots)
        case _ => tp
      }
    case tp: TypeVar =>
      val tp1 = tp.instanceOpt
      if (tp1.exists) tp1.dealias(keepAnnots) else tp
    case tp: AnnotatedType =>
      val tp1 = tp.tpe.dealias(keepAnnots)
      if (keepAnnots) tp.derivedAnnotatedType(tp1, tp.annot) else tp1
    case tp: LazyRef =>
      tp.ref.dealias(keepAnnots)
    case app @ HKApply(tycon, args) =>
      // Only continue if dealiasing the constructor made progress.
      val tycon1 = tycon.dealias(keepAnnots)
      if (tycon1 ne tycon) app.superType.dealias(keepAnnots)
      else this
    case _ => this
  }

  /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type
   *  is no longer alias type, LazyRef, or instantiated type variable.
   *  Goes through annotated types and rewraps annotations on the result.
   */
  final def dealiasKeepAnnots(implicit ctx: Context): Type =
    dealias(keepAnnots = true)

  /** Follow aliases and dereferences LazyRefs, annotated types and instantiated
   *  TypeVars until type is no longer alias type, annotated type, LazyRef,
   *  or instantiated type variable.
   */
  final def dealias(implicit ctx: Context): Type =
    dealias(keepAnnots = false)

  /** Perform successive widenings and dealiasings until none can be applied anymore */
  final def widenDealias(implicit ctx: Context): Type = {
    val res = this.widen.dealias
    if (res eq this) res else res.widenDealias
  }

  /** Widen from constant type to its underlying non-constant
   *  base type.
   */
  final def deconst(implicit ctx: Context): Type = stripTypeVar match {
    case tp: ConstantType => tp.value.tpe
    case _ => this
  }
+
  /** If this is a (possibly aliased, annotated, and/or parameterized) reference to
   *  a class, the class type ref, otherwise NoType.
   *  @param refinementOK If `true` we also skip non-parameter refinements.
   */
  def underlyingClassRef(refinementOK: Boolean)(implicit ctx: Context): Type = dealias match {
    case tp: TypeRef =>
      if (tp.symbol.isClass) tp
      else if (tp.symbol.isAliasType) tp.underlying.underlyingClassRef(refinementOK)
      else NoType
    case tp: AnnotatedType =>
      tp.underlying.underlyingClassRef(refinementOK)
    case tp: RefinedType =>
      // Refinements of class type parameters are always skipped.
      def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName)
      if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK)
      else NoType
    case tp: RecType =>
      tp.underlying.underlyingClassRef(refinementOK)
    case _ =>
      NoType
  }

  /** The iterator of underlying types as long as type is a TypeProxy.
   *  Useful for diagnostics
   */
  def underlyingIterator(implicit ctx: Context): Iterator[Type] = new Iterator[Type] {
    var current = Type.this
    var hasNext = true
    def next = {
      val res = current
      hasNext = current.isInstanceOf[TypeProxy]
      if (hasNext) current = current.asInstanceOf[TypeProxy].underlying
      res
    }
  }

  /** A prefix-less refined this or a termRef to a new skolem symbol
   *  that has the given type as info.
   */
  def narrow(implicit ctx: Context): TermRef =
    TermRef(NoPrefix, ctx.newSkolem(this))

  /** Useful for diagnostics: The underlying type if this type is a type proxy,
   *  otherwise NoType
   */
  def underlyingIfProxy(implicit ctx: Context) = this match {
    case this1: TypeProxy => this1.underlying
    case _ => NoType
  }

  /** If this is a FunProto or PolyProto, WildcardType, otherwise this. */
  def notApplied: Type = this
+
  // ----- Normalizing typerefs over refined types ----------------------------

  /** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed
   *  by other refinements), and the refined info is a type alias, return the alias,
   *  otherwise return NoType. Used to reduce types of the form
   *
   *    P { ... type T = / += / -= U ... } # T
   *
   *  to just U. Does not perform the reduction if the resulting type would contain
   *  a reference to the "this" of the current refined type, except in the following situation
   *
   *  (1) The "this" reference can be avoided by following an alias. Example:
   *
   *      P { type T = String, type R = P{...}.T } # R  -->  String
   *
   *  (*) normalizes means: follow instantiated typevars and aliases.
   */
  def lookupRefined(name: Name)(implicit ctx: Context): Type = {
    def loop(pre: Type): Type = pre.stripTypeVar match {
      case pre: RefinedType =>
        pre.refinedInfo match {
          case TypeAlias(alias) =>
            if (pre.refinedName ne name) loop(pre.parent) else alias
          case _ => loop(pre.parent)
        }
      case pre: RecType =>
        val candidate = loop(pre.parent)
        // Only reduce if the alias does not refer back to the recursive type itself.
        if (candidate.exists && !pre.isReferredToBy(candidate)) {
          //println(s"lookupRefined ${this.toString} . $name, pre: $pre ---> $candidate / ${candidate.toString}")
          candidate
        }
        else NoType
      case SkolemType(tp) =>
        tp.lookupRefined(name)
      case pre: WildcardType =>
        WildcardType
      case pre: TypeRef =>
        pre.info match {
          case TypeAlias(alias) => loop(alias)
          case _ => NoType
        }
      case _ =>
        NoType
    }

    loop(this)
  }

  /** The type <this . name> , reduced if possible */
  def select(name: Name)(implicit ctx: Context): Type = name match {
    case name: TermName => TermRef.all(this, name)
    case name: TypeName => TypeRef(this, name).reduceProjection
  }

  /** The type <this . name> , reduced if possible, with given denotation if unreduced */
  def select(name: Name, denot: Denotation)(implicit ctx: Context): Type = name match {
    case name: TermName => TermRef(this, name, denot)
    case name: TypeName => TypeRef(this, name, denot).reduceProjection
  }

  /** The type <this . name> with given symbol, reduced if possible */
  def select(sym: Symbol)(implicit ctx: Context): Type =
    if (sym.isTerm) TermRef(this, sym.asTerm)
    else TypeRef(this, sym.asType).reduceProjection
+
+// ----- Access to parts --------------------------------------------
+
+ /** The normalized prefix of this type is:
+ * For an alias type, the normalized prefix of its alias
+ * For all other named type and class infos: the prefix.
+ * Inherited by all other type proxies.
+ * `NoType` for all other types.
+ */
+ final def normalizedPrefix(implicit ctx: Context): Type = this match {
+ case tp: NamedType =>
+ if (tp.symbol.info.isAlias) tp.info.normalizedPrefix else tp.prefix
+ case tp: ClassInfo =>
+ tp.prefix
+ case tp: TypeProxy =>
+ tp.underlying.normalizedPrefix
+ case _ =>
+ NoType
+ }
+
+ /** For a ClassInfo type, its parents,
+ * Inherited by all type proxies. Empty for all other types.
+ * Overwritten in ClassInfo, where parents is cached.
+ */
+ def parents(implicit ctx: Context): List[TypeRef] = this match {
+ case tp: TypeProxy => tp.underlying.parents
+ case _ => List()
+ }
+
+ /** The full parent types, including all type arguments */
+ def parentsWithArgs(implicit ctx: Context): List[Type] = this match {
+ case tp: TypeProxy => tp.superType.parentsWithArgs
+ case _ => List()
+ }
+
+ /** The first parent of this type, AnyRef if list of parents is empty */
+ def firstParent(implicit ctx: Context): TypeRef = parents match {
+ case p :: _ => p
+ case _ => defn.AnyType
+ }
+
+ /** the self type of the underlying classtype */
+ def givenSelfType(implicit ctx: Context): Type = this match {
+ case tp: RefinedType => tp.wrapIfMember(tp.parent.givenSelfType)
+ case tp: ThisType => tp.tref.givenSelfType
+ case tp: TypeProxy => tp.superType.givenSelfType
+ case _ => NoType
+ }
+
+ /** The parameter types of a PolyType or MethodType, empty list for others.
+ * One inner list per parameter section (hence the double `ss` in the name);
+ * PolyType type-parameter sections contribute no list of their own.
+ */
+ final def paramTypess(implicit ctx: Context): List[List[Type]] = this match {
+ case mt: MethodType => mt.paramTypes :: mt.resultType.paramTypess
+ case pt: PolyType => pt.resultType.paramTypess
+ case _ => Nil
+ }
+
+ /** The parameter names of a PolyType or MethodType, empty list for others.
+ * Mirrors `paramTypess`: one inner list per value-parameter section.
+ */
+ final def paramNamess(implicit ctx: Context): List[List[TermName]] = this match {
+ case mt: MethodType => mt.paramNames :: mt.resultType.paramNamess
+ case pt: PolyType => pt.resultType.paramNamess
+ case _ => Nil
+ }
+
+
+ /** The parameter types in the first parameter section of a generic type or MethodType, Empty list for others */
+ final def firstParamTypes(implicit ctx: Context): List[Type] = this match {
+ case mt: MethodType => mt.paramTypes
+ case pt: PolyType => pt.resultType.firstParamTypes
+ case _ => Nil
+ }
+
+ /** Is this either not a method at all, or a parameterless method? */
+ final def isParameterless(implicit ctx: Context): Boolean = this match {
+ case _: MethodType => false
+ case poly: PolyType => poly.resultType.isParameterless
+ case _ => true
+ }
+
+ /** The resultType of a PolyType, MethodType, or ExprType, the type itself for others.
+ * Overridden in the method-like types; this default covers "all others".
+ */
+ def resultType(implicit ctx: Context): Type = this
+
+ /** The final result type of a PolyType, MethodType, or ExprType, after skipping
+ * all parameter sections, the type itself for all others.
+ */
+ def finalResultType(implicit ctx: Context): Type = resultType match {
+ case mt: MethodType => mt.resultType.finalResultType
+ case pt: PolyType => pt.resultType.finalResultType
+ case _ => resultType
+ }
+
+ /** This type seen as a TypeBounds.
+ * TypeBounds are returned as-is; a ClassInfo or any other type is wrapped
+ * as an exact alias `TypeAlias(tp)`; an unbounded WildcardType (optBounds
+ * is NoType) yields the empty bounds.
+ */
+ final def bounds(implicit ctx: Context): TypeBounds = this match {
+ case tp: TypeBounds => tp
+ case ci: ClassInfo => TypeAlias(ci.typeRef)
+ case wc: WildcardType =>
+ wc.optBounds match {
+ case bounds: TypeBounds => bounds
+ case NoType => TypeBounds.empty
+ }
+ case _ => TypeAlias(this)
+ }
+
+ /** The type parameter with given `name`. This tries first `decls`
+ * in order not to provoke a cycle by forcing the info. If that yields
+ * no symbol it tries `member` as an alternative.
+ */
+ def typeParamNamed(name: TypeName)(implicit ctx: Context): Symbol =
+ classSymbol.unforcedDecls.lookup(name) orElse member(name).symbol
+
+ /** If this is a prototype with some ignored component, reveal one more
+ * layer of it. Otherwise the type itself. Overridden in prototype classes;
+ * this default is the identity.
+ */
+ def deepenProto(implicit ctx: Context): Type = this
+
+// ----- Substitutions -----------------------------------------------------
+
+ /** Substitute all types that refer in their symbol attribute to
+ * one of the symbols in `from` by the corresponding types in `to`.
+ * The 1- and 2-symbol cases are dispatched to specialized `subst1`/`subst2`
+ * operations — presumably to avoid the cost of the general list-based
+ * substitution for the common short cases.
+ */
+ final def subst(from: List[Symbol], to: List[Type])(implicit ctx: Context): Type =
+ if (from.isEmpty) this
+ else {
+ val from1 = from.tail
+ if (from1.isEmpty) ctx.subst1(this, from.head, to.head, null)
+ else {
+ val from2 = from1.tail
+ if (from2.isEmpty) ctx.subst2(this, from.head, to.head, from1.head, to.tail.head, null)
+ else ctx.subst(this, from, to, null)
+ }
+ }
+
+ /** Same as `subst` but follows aliases as a fallback. When faced with a reference
+ * to an alias type, where normal substitution does not yield a new type, the
+ * substitution is instead applied to the alias. If that yields a new type,
+ * this type is returned, otherwise the original type (not the alias) is returned.
+ * A use case for this method is if one wants to substitute the type parameters
+ * of a class and also wants to substitute any parameter accessors that alias
+ * the type parameters.
+ */
+ final def substDealias(from: List[Symbol], to: List[Type])(implicit ctx: Context): Type =
+ ctx.substDealias(this, from, to, null)
+
+ /** Substitute all types of the form `PolyParam(from, N)` by
+ * `PolyParam(to, N)`, i.e. re-bind parameter references from one
+ * binder to another.
+ */
+ final def subst(from: BindingType, to: BindingType)(implicit ctx: Context): Type =
+ ctx.subst(this, from, to, null)
+
+ /** Substitute all occurrences of `This(cls)` by `tp` */
+ final def substThis(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type =
+ ctx.substThis(this, cls, tp, null)
+
+ /** As substThis, but only if `cls` is not a static owner (i.e. not a globally
+ * accessible object); for static owners the type is returned unchanged.
+ */
+ final def substThisUnlessStatic(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type =
+ if (cls.isStaticOwner) this else ctx.substThis(this, cls, tp, null)
+
+ /** Substitute all occurrences of `RecThis(binder)` by `tp` */
+ final def substRecThis(binder: RecType, tp: Type)(implicit ctx: Context): Type =
+ ctx.substRecThis(this, binder, tp, null)
+
+ /** Substitute a bound type (a single parameter reference) by some other type */
+ final def substParam(from: ParamType, to: Type)(implicit ctx: Context): Type =
+ ctx.substParam(this, from, to, null)
+
+ /** Substitute all bound types of binder `from` by the corresponding types in `to` */
+ final def substParams(from: BindingType, to: List[Type])(implicit ctx: Context): Type =
+ ctx.substParams(this, from, to, null)
+
+ /** Substitute all occurrences of symbols in `from` by references to corresponding symbols in `to`
+ */
+ final def substSym(from: List[Symbol], to: List[Symbol])(implicit ctx: Context): Type =
+ ctx.substSym(this, from, to, null)
+
+// ----- misc -----------------------------------------------------------
+
+ /** Turn type into a function type.
+ * @pre this is a non-dependent method type.
+ * @param dropLast The number of trailing parameters that should be dropped
+ * when forming the function type.
+ * Note: the match is deliberately non-exhaustive — a MatchError signals a
+ * violated precondition (not a MethodType, or dependent without the
+ * AllowDependentFunctions mode).
+ */
+ def toFunctionType(dropLast: Int = 0)(implicit ctx: Context): Type = this match {
+ case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) =>
+ val formals1 = if (dropLast == 0) formals else formals dropRight dropLast
+ defn.FunctionOf(
+ // Java repeated parameters are mapped back to their element/array form.
+ formals1 mapConserve (_.underlyingIfRepeated(mt.isJava)), mt.resultType)
+ }
+
+ /** The signature of this type. This is by default NotAMethod,
+ * but is overridden for PolyTypes, MethodTypes, and TermRefWithSignature types.
+ * (the reason why we deviate from the "final-method-with-pattern-match-in-base-class"
+ * pattern is that method signatures use caching, so encapsulation
+ * is improved using an OO scheme).
+ */
+ def signature(implicit ctx: Context): Signature = Signature.NotAMethod
+
+ /** Convert to text, delegating to the given printer's rendering of this type. */
+ def toText(printer: Printer): Text = printer.toText(this)
+
+ /** Utility method to show the underlying type of a TypeProxy chain together
+ * with the proxy type itself.
+ * @param n how many levels of `underlying` to unfold (default 1).
+ */
+ def showWithUnderlying(n: Int = 1)(implicit ctx: Context): String = this match {
+ case tp: TypeProxy if n > 0 => s"$show with underlying ${tp.underlying.showWithUnderlying(n - 1)}"
+ case _ => show
+ }
+
+ /** A simplified version of this type which is equivalent wrt =:= to this type.
+ * This applies a typemap to the type which (as all typemaps) follows type
+ * variable instances and reduces typerefs over refined types. It also
+ * re-evaluates all occurrences of And/OrType with &/| because
+ * what was a union or intersection of type variables might be a simpler type
+ * after the type variables are instantiated. Finally, it
+ * maps poly params in the current constraint set back to their type vars.
+ */
+ def simplified(implicit ctx: Context) = ctx.simplify(this, null)
+
+ /** Customized hash code of this type.
+ * NotCached for uncached types. Cached types
+ * compute hash and use it as the type's hashCode.
+ * Implemented by the four abstract base classes below.
+ */
+ def hash: Int
+ } // end Type
+
+// ----- Type categories ----------------------------------------------
+
+ /** A marker trait for cached types (hash-consed in the unique-types tables). */
+ trait CachedType extends Type
+
+ /** A marker trait for type proxies.
+ * Each implementation is expected to redefine the `underlying` method.
+ */
+ abstract class TypeProxy extends Type {
+
+ /** The type to which this proxy forwards operations. */
+ def underlying(implicit ctx: Context): Type
+
+ /** The closest supertype of this type. This is the same as `underlying`,
+ * except for TypeRefs where the upper bound is returned, and HKApplys,
+ * where the upper bound of the constructor is re-applied to the arguments.
+ */
+ def superType(implicit ctx: Context): Type = underlying
+ }
+
+ // Every type has to inherit one of the following four abstract base classes,
+ // which determine whether the type is cached, and whether
+ // it is a proxy of some other type. The duplication in their methods
+ // is for efficiency.
+
+ /** Instances of this class are cached and are not proxies.
+ * The hash is computed lazily on first access and memoized;
+ * `HashUnknown` serves as the "not yet computed" sentinel.
+ */
+ abstract class CachedGroundType extends Type with CachedType {
+ private[this] var myHash = HashUnknown
+ final def hash = {
+ if (myHash == HashUnknown) {
+ myHash = computeHash
+ assert(myHash != HashUnknown)
+ }
+ myHash
+ }
+ // Types whose hash is NotCached fall back to identity hashing.
+ override final def hashCode =
+ if (hash == NotCached) System.identityHashCode(this) else hash
+ def computeHash: Int
+ }
+
+ /** Instances of this class are cached and are proxies.
+ * Same lazy, memoized hash scheme as CachedGroundType; `myHash` is
+ * `protected` here so subclasses (e.g. the CachedTermRef/CachedTypeRef
+ * constructors) can pre-seed it.
+ */
+ abstract class CachedProxyType extends TypeProxy with CachedType {
+ protected[this] var myHash = HashUnknown
+ final def hash = {
+ if (myHash == HashUnknown) {
+ myHash = computeHash
+ assert(myHash != HashUnknown)
+ }
+ myHash
+ }
+ override final def hashCode =
+ if (hash == NotCached) System.identityHashCode(this) else hash
+ def computeHash: Int
+ }
+
+ /** Instances of this class are uncached and are not proxies.
+ * The `if (monitored)` block is a constructor statement: it records
+ * statistics about uncachable type creation when monitoring is on.
+ */
+ abstract class UncachedGroundType extends Type {
+ final def hash = NotCached
+ if (monitored) {
+ record(s"uncachable")
+ record(s"uncachable: $getClass")
+ }
+ }
+
+ /** Instances of this class are uncached and are proxies.
+ * As in UncachedGroundType, the `if (monitored)` block runs at
+ * construction time for statistics only.
+ */
+ abstract class UncachedProxyType extends TypeProxy {
+ final def hash = NotCached
+ if (monitored) {
+ record(s"uncachable")
+ record(s"uncachable: $getClass")
+ }
+ }
+
+ /** A marker trait for types that apply only to type symbols */
+ trait TypeType extends Type
+
+ /** A marker trait for types that apply only to term symbols or that
+ * represent higher-kinded types.
+ */
+ trait TermType extends Type
+
+ /** A marker trait for types that can be types of values or prototypes of value types */
+ trait ValueTypeOrProto extends TermType
+
+ /** A marker trait for types that can be types of values or that are higher-kinded */
+ trait ValueType extends ValueTypeOrProto
+
+ /** A marker trait for types that are guaranteed to contain only a
+ * single non-null value (they might contain null in addition).
+ */
+ trait SingletonType extends TypeProxy with ValueType {
+ // Singletons denote one value, so they are not overloaded by default;
+ // TermRef overrides this based on its denotation.
+ def isOverloaded(implicit ctx: Context) = false
+ }
+
+ /** A marker trait for types that bind other types that refer to them.
+ * Instances are: PolyType, MethodType, RefinedType.
+ */
+ trait BindingType extends Type
+
+ /** A trait for proto-types, used as expected types in typer */
+ trait ProtoType extends Type {
+ /** Does `tp` conform to this prototype's expectation? */
+ def isMatchedBy(tp: Type)(implicit ctx: Context): Boolean
+ /** Fold an accumulator over this prototype's constituent types. */
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context): T
+ /** Map `tm` over this prototype's constituent types, keeping it a ProtoType. */
+ def map(tm: TypeMap)(implicit ctx: Context): ProtoType
+ }
+
+ /** Implementations of this trait cache the results of `narrow`. */
+ trait NarrowCached extends Type {
+ // null means "not yet computed"; narrow is cached on first request.
+ private var myNarrow: TermRef = null
+ override def narrow(implicit ctx: Context): TermRef = {
+ if (myNarrow eq null) myNarrow = super.narrow
+ myNarrow
+ }
+ }
+
+// --- NamedTypes ------------------------------------------------------------------
+
+ /** A NamedType of the form Prefix # name */
+ abstract class NamedType extends CachedProxyType with ValueType {
+
+ val prefix: Type
+ val name: Name
+
+ type ThisType >: this.type <: NamedType
+
+ assert(prefix.isValueType || (prefix eq NoPrefix), s"invalid prefix $prefix")
+
+ // Cached denotation state; see the invariants below.
+ private[this] var lastDenotation: Denotation = _
+ private[this] var lastSymbol: Symbol = _
+ private[this] var checkedPeriod = Nowhere
+
+ // Invariants:
+ // (1) checkedPeriod != Nowhere => lastDenotation != null
+ // (2) lastDenotation != null => lastSymbol != null
+
+ /** There is a denotation computed which is valid (somewhere in) the
+ * current run.
+ */
+ def denotationIsCurrent(implicit ctx: Context) =
+ lastDenotation != null && lastDenotation.validFor.runId == ctx.runId
+
+ /** If the denotation is current, its symbol, otherwise NoSymbol.
+ *
+ * Note: This operation does not force the denotation, and is therefore
+ * timing dependent. It should only be used if the outcome of the
+ * essential computation does not depend on the symbol being present or not.
+ * It's currently used to take an optimized path in substituters and
+ * type accumulators, as well as to be safe in diagnostic printing.
+ * Normally, it's better to use `symbol`, not `currentSymbol`.
+ */
+ def currentSymbol(implicit ctx: Context) =
+ if (denotationIsCurrent) symbol else NoSymbol
+
+ /** The denotation currently denoted by this type.
+ * Fast path: if the cache was validated for exactly this period,
+ * return it without further checks.
+ */
+ final def denot(implicit ctx: Context): Denotation = {
+ val now = ctx.period
+ if (checkedPeriod == now) lastDenotation else denotAt(now)
+ }
+
+ /** A first fall back to do a somewhat more expensive calculation in case the first
+ * attempt in `denot` does not yield a denotation.
+ */
+ private def denotAt(now: Period)(implicit ctx: Context): Denotation = {
+ val d = lastDenotation
+ if (d != null && (d.validFor contains now)) {
+ checkedPeriod = now
+ d
+ }
+ else computeDenot
+ }
+
+ /** Hook for adding debug check code when denotations are assigned */
+ final def checkDenot()(implicit ctx: Context) = {}
+
+ /** A second fallback to recompute the denotation if necessary.
+ * Saves and restores the typer state's `ephemeral` flag so that
+ * results computed under an ephemeral state are not cached.
+ */
+ private def computeDenot(implicit ctx: Context): Denotation = {
+ val savedEphemeral = ctx.typerState.ephemeral
+ ctx.typerState.ephemeral = false
+ try {
+ val d = lastDenotation match {
+ case null =>
+ val sym = lastSymbol
+ if (sym == null) loadDenot else denotOfSym(sym)
+ case d: SymDenotation =>
+ if (this.isInstanceOf[WithFixedSym]) d.current
+ else if (d.validFor.runId == ctx.runId || ctx.stillValid(d))
+ if (d.exists && prefix.isTightPrefix(d.owner) || d.isConstructor) d.current
+ else recomputeMember(d) // symbol could have been overridden, recompute membership
+ else {
+ val newd = loadDenot
+ if (newd.exists) newd else d.staleSymbolError
+ }
+ case d =>
+ if (d.validFor.runId != ctx.period.runId) loadDenot
+ else d.current
+ }
+ if (ctx.typerState.ephemeral) record("ephemeral cache miss: loadDenot")
+ else if (d.exists) {
+ // Avoid storing NoDenotations in the cache - we will not be able to recover from
+ // them. The situation might arise that a type has NoDenotation in some later
+ // phase but a defined denotation earlier (e.g. a TypeRef to an abstract type
+ // is undefined after erasure.) We need to be able to do time travel back and
+ // forth also in these cases.
+
+ // Don't use setDenot here; double binding checks can give spurious failures after erasure
+ lastDenotation = d
+ checkDenot()
+ lastSymbol = d.symbol
+ checkedPeriod = ctx.period
+ }
+ d
+ }
+ finally ctx.typerState.ephemeral |= savedEphemeral
+ }
+
+ /** A member of `prefix` (disambiguated by `d.signature`) or, if none was found, `d.current`. */
+ private def recomputeMember(d: SymDenotation)(implicit ctx: Context): Denotation =
+ asMemberOf(prefix) match {
+ case NoDenotation => d.current
+ case newd: SingleDenotation => newd
+ case newd =>
+ newd.atSignature(d.signature) match {
+ case newd1: SingleDenotation if newd1.exists => newd1
+ case _ => d.current
+ }
+ }
+
+ /** The denotation of `sym`, seen from this type's prefix unless the
+ * symbol is owned by a term (in which case no as-seen-from is needed).
+ */
+ private def denotOfSym(sym: Symbol)(implicit ctx: Context): Denotation = {
+ val d = sym.denot
+ val owner = d.owner
+ if (owner.isTerm) d else d.asSeenFrom(prefix)
+ }
+
+ /** Assertion guard against accidentally rebinding this type to an unrelated
+ * symbol (a "data race" on the cache). Rebinding is allowed across runs,
+ * for error types, and between a class and its subclasses/self types.
+ */
+ private def checkSymAssign(sym: Symbol)(implicit ctx: Context) = {
+ def selfTypeOf(sym: Symbol) = sym.owner.info match {
+ case info: ClassInfo => info.givenSelfType
+ case _ => NoType
+ }
+ assert(
+ (lastSymbol eq sym) ||
+ (lastSymbol eq null) || {
+ val lastDefRunId = lastDenotation match {
+ case d: SymDenotation => d.validFor.runId
+ case _ => lastSymbol.defRunId
+ }
+ (lastDefRunId != sym.defRunId) ||
+ (lastDefRunId == NoRunId)
+ } ||
+ (lastSymbol.infoOrCompleter == ErrorType ||
+ sym.owner != lastSymbol.owner &&
+ (sym.owner.derivesFrom(lastSymbol.owner) ||
+ selfTypeOf(sym).derivesFrom(lastSymbol.owner) ||
+ selfTypeOf(lastSymbol).derivesFrom(sym.owner))),
+ i"""data race? overwriting symbol of type $this,
+ |long form = $toString of class $getClass,
+ |last sym id = ${lastSymbol.id}, new sym id = ${sym.id},
+ |last owner = ${lastSymbol.owner}, new owner = ${sym.owner},
+ |period = ${ctx.phase} at run ${ctx.runId}""")
+ }
+
+ protected def sig: Signature = Signature.NotAMethod
+
+ private[dotc] def withDenot(denot: Denotation)(implicit ctx: Context): ThisType =
+ if (sig != denot.signature)
+ // Signature mismatch: switch to a TermRefWithSignature carrying the denotation's signature.
+ withSig(denot.signature).withDenot(denot).asInstanceOf[ThisType]
+ else {
+ setDenot(denot)
+ this
+ }
+
+ private[dotc] final def setDenot(denot: Denotation)(implicit ctx: Context): Unit = {
+ if (Config.checkNoDoubleBindings)
+ if (ctx.settings.YnoDoubleBindings.value)
+ checkSymAssign(denot.symbol)
+
+ // additional checks that intercept `denot` can be added here
+
+ lastDenotation = denot
+ checkDenot()
+ lastSymbol = denot.symbol
+ checkedPeriod = Nowhere
+ }
+
+ private[dotc] def withSym(sym: Symbol, signature: Signature)(implicit ctx: Context): ThisType =
+ if (sig != signature)
+ withSig(signature).withSym(sym, signature).asInstanceOf[ThisType]
+ else {
+ setSym(sym)
+ this
+ }
+
+ private[dotc] final def setSym(sym: Symbol)(implicit ctx: Context): Unit = {
+ if (Config.checkNoDoubleBindings)
+ if (ctx.settings.YnoDoubleBindings.value)
+ checkSymAssign(sym)
+ uncheckedSetSym(sym)
+ }
+
+ private[dotc] final def uncheckedSetSym(sym: Symbol): Unit = {
+ // Resets the denotation cache; the symbol will be re-resolved on demand.
+ lastDenotation = null
+ lastSymbol = sym
+ checkedPeriod = Nowhere
+ }
+
+ private def withSig(sig: Signature)(implicit ctx: Context): NamedType =
+ TermRef.withSig(prefix, name.asTermName, sig)
+
+ /** Look up this name in the prefix. If the member no longer exists at the
+ * current phase (the name may have changed), retry at the previous phase
+ * and bring the result forward with `current`.
+ */
+ protected def loadDenot(implicit ctx: Context): Denotation = {
+ val d = asMemberOf(prefix)
+ if (d.exists || ctx.phaseId == FirstPhaseId || !lastDenotation.isInstanceOf[SymDenotation])
+ d
+ else { // name has changed; try load in earlier phase and make current
+ val d = loadDenot(ctx.withPhase(ctx.phaseId - 1)).current
+ if (d.exists) d
+ else throw new Error(s"failure to reload $this of class $getClass")
+ }
+ }
+
+ /** Member lookup in `prefix`; shadowed names look up the original
+ * (non-private) member via `nonPrivateMember`.
+ */
+ protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation =
+ if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed)
+ else prefix.member(name)
+
+
+ /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type
+ * to an (unbounded) wildcard type.
+ *
+ * (2) Reduce a type-ref `T { X = U; ... } # X` to `U`
+ * provided `U` does not refer with a RecThis to the
+ * refinement type `T { X = U; ... }`
+ */
+ def reduceProjection(implicit ctx: Context): Type = {
+ val reduced = prefix.lookupRefined(name)
+ if (reduced.exists) reduced else this
+ }
+
+ /** The symbol denoted by this type. Uses the cached `lastSymbol` when the
+ * cache is valid for the current period (or when only a symbol was ever set),
+ * otherwise forces the denotation.
+ */
+ def symbol(implicit ctx: Context): Symbol = {
+ val now = ctx.period
+ if (checkedPeriod == now ||
+ lastDenotation == null && lastSymbol != null) lastSymbol
+ else denot.symbol
+ }
+
+ /** Retrieves currently valid symbol without necessarily updating denotation.
+ * Assumes that symbols do not change between periods in the same run.
+ * Used to get the class underlying a ThisType.
+ */
+ private[Types] def stableInRunSymbol(implicit ctx: Context): Symbol =
+ if (checkedPeriod.runId == ctx.runId) lastSymbol
+ else symbol
+
+ def info(implicit ctx: Context): Type = denot.info
+
+ def isType = isInstanceOf[TypeRef]
+ def isTerm = isInstanceOf[TermRef]
+
+ /** Guard against cycles that can arise if given `op`
+ * follows info. The problematic cases are a type alias to itself or
+ * bounded by itself or a val typed as itself:
+ *
+ * type T <: T
+ * val x: x.type
+ *
+ * These are errors but we have to make sure that operations do
+ * not loop before the error is detected.
+ */
+ final def controlled[T](op: => T)(implicit ctx: Context): T = try {
+ ctx.underlyingRecursions += 1
+ if (ctx.underlyingRecursions < Config.LogPendingUnderlyingThreshold)
+ op
+ else if (ctx.pendingUnderlying contains this)
+ throw CyclicReference(symbol)
+ else
+ try {
+ ctx.pendingUnderlying += this
+ op
+ } finally {
+ ctx.pendingUnderlying -= this
+ }
+ } finally {
+ ctx.underlyingRecursions -= 1
+ }
+
+ /** A selection of the same kind, but with potentially a different prefix.
+ * The following normalizations are performed for type selections T#A:
+ *
+ * T#A --> B if A is bound to an alias `= B` in T
+ *
+ * If Config.splitProjections is set:
+ *
+ * (S & T)#A --> S#A if T does not have a member named A
+ * --> T#A if S does not have a member named A
+ * --> S#A & T#A otherwise
+ * (S | T)#A --> S#A | T#A
+ */
+ def derivedSelect(prefix: Type)(implicit ctx: Context): Type =
+ if (prefix eq this.prefix) this
+ else if (isType) {
+ val res = prefix.lookupRefined(name)
+ if (res.exists) res
+ else if (Config.splitProjections)
+ prefix match {
+ case prefix: AndType =>
+ def isMissing(tp: Type) = tp match {
+ case tp: TypeRef => !tp.info.exists
+ case _ => false
+ }
+ val derived1 = derivedSelect(prefix.tp1)
+ val derived2 = derivedSelect(prefix.tp2)
+ return (
+ if (isMissing(derived1)) derived2
+ else if (isMissing(derived2)) derived1
+ else prefix.derivedAndType(derived1, derived2))
+ case prefix: OrType =>
+ val derived1 = derivedSelect(prefix.tp1)
+ val derived2 = derivedSelect(prefix.tp2)
+ return prefix.derivedOrType(derived1, derived2)
+ case _ =>
+ newLikeThis(prefix)
+ }
+ else newLikeThis(prefix)
+ }
+ else newLikeThis(prefix)
+
+ /** Create a NamedType of the same kind as this type, but with a new prefix.
+ */
+ def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType =
+ NamedType(prefix, name)
+
+ /** Create a NamedType of the same kind as this type, but with a "inherited name".
+ * This is necessary in situations like the following:
+ *
+ * class B { def m: T1 }
+ * class C extends B { private def m: T2; ... C.m }
+ * object C extends C
+ * object X { ... C.m }
+ *
+ * The two references of C.m in class C and object X refer to different
+ * definitions: The one in C refers to C#m whereas the one in X refers to B#m.
+ * But the type C.m must have only one denotation, so it can't refer to two
+ * members depending on context.
+ *
+ * In situations like this, the reference in X would get the type
+ * `<C.m>.shadowed` to make clear that we mean the inherited member, not
+ * the private one.
+ *
+ * Note: An alternative, possibly more robust scheme would be to give
+ * private members special names. A private definition would have a special
+ * name (say m' in the example above), but would be entered in its enclosing
+ * under both private and public names, so it could still be found by looking up
+ * the public name.
+ */
+ final def shadowed(implicit ctx: Context): NamedType =
+ NamedType(prefix, name.shadowedName)
+
+ // Plain NamedTypes are equal only to other plain NamedTypes (the
+ // signature- and fixed-sym-carrying subclasses define their own equality).
+ override def equals(that: Any) = that match {
+ case that: NamedType =>
+ this.name == that.name &&
+ this.prefix == that.prefix &&
+ !that.isInstanceOf[TermRefWithSignature] &&
+ !that.isInstanceOf[WithFixedSym]
+ case _ =>
+ false
+ }
+
+ /* A version of toString which also prints aliases. Can be used for debugging
+ override def toString =
+ if (isTerm) s"TermRef($prefix, $name)"
+ else s"TypeRef($prefix, $name)${
+ if (lastDenotation != null && lastDenotation.infoOrCompleter.isAlias)
+ s"@@@ ${lastDenotation.infoOrCompleter.asInstanceOf[TypeAlias].hi}"
+ else ""}"
+ */
+ }
+
+ /** A reference to a term member `prefix.name`; a singleton type. */
+ abstract case class TermRef(override val prefix: Type, name: TermName) extends NamedType with SingletonType {
+
+ type ThisType = TermRef
+
+ //assert(name.toString != "<local Coder>")
+ // The underlying type is the denotation's info; overloaded references
+ // have no single underlying type.
+ override def underlying(implicit ctx: Context): Type = {
+ val d = denot
+ if (d.isOverloaded) NoType else d.info
+ }
+
+ override def signature(implicit ctx: Context): Signature = denot.signature
+
+ override def isOverloaded(implicit ctx: Context) = denot.isOverloaded
+
+ // Wrap a single denotation back into a TermRef carrying its signature.
+ private def rewrap(sd: SingleDenotation)(implicit ctx: Context) =
+ TermRef.withSigAndDenot(prefix, name, sd.signature, sd)
+
+ /** All alternatives of this (possibly overloaded) reference, each as its own TermRef. */
+ def alternatives(implicit ctx: Context): List[TermRef] =
+ denot.alternatives map rewrap
+
+ /** The alternatives whose symbol satisfies `p`, each as its own TermRef. */
+ def altsWith(p: Symbol => Boolean)(implicit ctx: Context): List[TermRef] =
+ denot.altsWith(p) map rewrap
+ }
+
+ /** A reference to a type member `prefix.name`. */
+ abstract case class TypeRef(override val prefix: Type, name: TypeName) extends NamedType {
+
+ type ThisType = TypeRef
+
+ override def underlying(implicit ctx: Context): Type = info
+
+ // For bounded (abstract) types the closest supertype is the upper bound.
+ override def superType(implicit ctx: Context): Type = info match {
+ case TypeBounds(_, hi) => hi
+ case _ => info
+ }
+ }
+
+ /** A TermRef that additionally carries a fixed signature, used to pick one
+ * alternative of an overloaded member.
+ */
+ final class TermRefWithSignature(prefix: Type, name: TermName, override val sig: Signature) extends TermRef(prefix, name) {
+ assert(prefix ne NoPrefix)
+ override def signature(implicit ctx: Context) = sig
+ override def loadDenot(implicit ctx: Context): Denotation = {
+ val d = super.loadDenot
+ if (sig eq Signature.OverloadedSignature) d
+ else d.atSignature(sig).checkUnique
+ }
+
+ // If the signature cannot be resolved in the new prefix, fall back to
+ // recomputing the member from the previously known symbol's signature.
+ override def newLikeThis(prefix: Type)(implicit ctx: Context): TermRef = {
+ val candidate = TermRef.withSig(prefix, name, sig)
+ if (symbol.exists && !candidate.symbol.exists) { // recompute from previous symbol
+ val ownSym = symbol
+ val newd = asMemberOf(prefix)
+ candidate.withDenot(newd.suchThat(_.signature == ownSym.signature))
+ }
+ else candidate
+ }
+
+ // Equality also compares the signature, and only against the same subclass.
+ override def equals(that: Any) = that match {
+ case that: TermRefWithSignature =>
+ this.prefix == that.prefix &&
+ this.name == that.name &&
+ this.sig == that.sig
+ case _ =>
+ false
+ }
+ override def computeHash = doHash((name, sig), prefix)
+ override def toString = super.toString ++ s"/withSig($sig)"
+ }
+
+ /** A NamedType permanently bound to `fixedSym`; it is never re-resolved
+ * as a member of its prefix. The symbol is installed at construction time.
+ */
+ trait WithFixedSym extends NamedType {
+ def fixedSym: Symbol
+ assert(fixedSym ne NoSymbol)
+ uncheckedSetSym(fixedSym)
+
+ override def withDenot(denot: Denotation)(implicit ctx: Context): ThisType = {
+ // Only denotations of the fixed symbol may be installed.
+ assert(denot.symbol eq fixedSym)
+ setDenot(denot)
+ this
+ }
+
+ override def withSym(sym: Symbol, signature: Signature)(implicit ctx: Context): ThisType =
+ unsupported("withSym")
+
+ override def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType =
+ NamedType.withFixedSym(prefix, fixedSym)
+
+ // Equality is by prefix and symbol identity, not by name.
+ override def equals(that: Any) = that match {
+ case that: WithFixedSym => this.prefix == that.prefix && (this.fixedSym eq that.fixedSym)
+ case _ => false
+ }
+ override def computeHash = doHash(fixedSym, prefix)
+ }
+
+ /** Hash-consed TermRef; the hash `hc` is pre-seeded by the uniques table,
+ * so computeHash must never be called.
+ */
+ final class CachedTermRef(prefix: Type, name: TermName, hc: Int) extends TermRef(prefix, name) {
+ assert(prefix ne NoPrefix)
+ myHash = hc
+ override def computeHash = unsupported("computeHash")
+ }
+
+ /** Hash-consed TypeRef; the hash `hc` is pre-seeded by the uniques table,
+ * so computeHash must never be called.
+ */
+ final class CachedTypeRef(prefix: Type, name: TypeName, hc: Int) extends TypeRef(prefix, name) {
+ assert(prefix ne NoPrefix)
+ myHash = hc
+ override def computeHash = unsupported("computeHash")
+ }
+
+ // These classes are non-final as Linker extends them.
+ /** A TermRef permanently bound to `fixedSym`. */
+ class TermRefWithFixedSym(prefix: Type, name: TermName, val fixedSym: TermSymbol) extends TermRef(prefix, name) with WithFixedSym
+ /** A TypeRef permanently bound to `fixedSym`. */
+ class TypeRefWithFixedSym(prefix: Type, name: TypeName, val fixedSym: TypeSymbol) extends TypeRef(prefix, name) with WithFixedSym
+
+ /** Assert current phase does not have erasure semantics (only when
+ * Config.checkUnerased is on).
+ */
+ private def assertUnerased()(implicit ctx: Context) =
+ if (Config.checkUnerased) assert(!ctx.phase.erasedTypes)
+
+ /** Factory methods dispatching on term vs type names/symbols. */
+ object NamedType {
+ /** A TermRef or TypeRef for `name` in `prefix`, by the kind of `name`. */
+ def apply(prefix: Type, name: Name)(implicit ctx: Context) =
+ if (name.isTermName) TermRef.all(prefix, name.asTermName)
+ else TypeRef(prefix, name.asTypeName)
+ /** As above, but seeded with an initial denotation. */
+ def apply(prefix: Type, name: Name, denot: Denotation)(implicit ctx: Context) =
+ if (name.isTermName) TermRef(prefix, name.asTermName, denot)
+ else TypeRef(prefix, name.asTypeName, denot)
+ /** A reference permanently bound to `sym` (not reloadable via `member`). */
+ def withFixedSym(prefix: Type, sym: Symbol)(implicit ctx: Context) =
+ if (sym.isType) TypeRef.withFixedSym(prefix, sym.name.asTypeName, sym.asType)
+ else TermRef.withFixedSym(prefix, sym.name.asTermName, sym.asTerm)
+ /** A reference to `sym` under an explicitly given `name`. */
+ def withSymAndName(prefix: Type, sym: Symbol, name: Name)(implicit ctx: Context): NamedType =
+ if (sym.isType) TypeRef.withSymAndName(prefix, sym.asType, name.asTypeName)
+ else TermRef.withSymAndName(prefix, sym.asTerm, name.asTermName)
+ }
+
+ object TermRef {
+
+ // Whether the current phase requires symbolic (fixed-sym) references.
+ private def symbolicRefs(implicit ctx: Context) = ctx.phase.symbolicRefs
+
+ /** Create term ref with given name, without specifying a signature.
+ * Its meaning is the (potentially multi-) denotation of the member(s)
+ * of prefix with given name.
+ */
+ def all(prefix: Type, name: TermName)(implicit ctx: Context): TermRef = {
+ ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TermRef]
+ }
+
+ /** Create term ref referring to given symbol, taking the signature
+ * from the symbol if it is completed, or creating a term ref without
+ * signature, if symbol is not yet completed.
+ */
+ def apply(prefix: Type, sym: TermSymbol)(implicit ctx: Context): TermRef =
+ withSymAndName(prefix, sym, sym.name)
+
+ /** Create term ref to given initial denotation, taking the signature
+ * from the denotation if it is completed, or creating a term ref without
+ * signature, if denotation is not yet completed.
+ */
+ def apply(prefix: Type, name: TermName, denot: Denotation)(implicit ctx: Context): TermRef = {
+ if ((prefix eq NoPrefix) || denot.symbol.isFresh || symbolicRefs)
+ apply(prefix, denot.symbol.asTerm)
+ else denot match {
+ case denot: SymDenotation if denot.isCompleted => withSig(prefix, name, denot.signature)
+ case _ => all(prefix, name)
+ }
+ } withDenot denot
+
+ /** Create a non-member term ref (which cannot be reloaded using `member`),
+ * with given prefix, name, and signature
+ */
+ def withFixedSym(prefix: Type, name: TermName, sym: TermSymbol)(implicit ctx: Context): TermRef =
+ unique(new TermRefWithFixedSym(prefix, name, sym))
+
+ /** Create a term ref referring to given symbol with given name, taking the signature
+ * from the symbol if it is completed, or creating a term ref without
+ * signature, if symbol is not yet completed. This is very similar to TermRef(Type, Symbol),
+ * except for two differences:
+ * (1) The symbol might not yet have a denotation, so the name needs to be given explicitly.
+ * (2) The name in the term ref need not be the same as the name of the Symbol.
+ */
+ def withSymAndName(prefix: Type, sym: TermSymbol, name: TermName)(implicit ctx: Context): TermRef =
+ if ((prefix eq NoPrefix) || sym.isFresh || symbolicRefs)
+ withFixedSym(prefix, name, sym)
+ else if (sym.defRunId != NoRunId && sym.isCompleted)
+ withSig(prefix, name, sym.signature) withSym (sym, sym.signature)
+ // Linker note:
+ // this is problematic, as withSig method could return a hash-consed reference
+ // that could have symbol already set making withSym trigger a double-binding error
+ // ./tests/run/absoverride.scala demonstrates this
+ else
+ all(prefix, name) withSym (sym, Signature.NotAMethod)
+
+ /** Create a term ref to given symbol, taking the signature from the symbol
+ * (which must be completed).
+ */
+ def withSig(prefix: Type, sym: TermSymbol)(implicit ctx: Context): TermRef =
+ if ((prefix eq NoPrefix) || sym.isFresh || symbolicRefs) withFixedSym(prefix, sym.name, sym)
+ else withSig(prefix, sym.name, sym.signature).withSym(sym, sym.signature)
+
+ /** Create a term ref with given prefix, name and signature */
+ def withSig(prefix: Type, name: TermName, sig: Signature)(implicit ctx: Context): TermRef =
+ unique(new TermRefWithSignature(prefix, name, sig))
+
+ /** Create a term ref with given prefix, name, signature, and initial denotation */
+ def withSigAndDenot(prefix: Type, name: TermName, sig: Signature, denot: Denotation)(implicit ctx: Context): TermRef = {
+ if ((prefix eq NoPrefix) || denot.symbol.isFresh || symbolicRefs)
+ withFixedSym(prefix, denot.symbol.asTerm.name, denot.symbol.asTerm)
+ else
+ withSig(prefix, name, sig)
+ } withDenot denot
+ }
+
+ object TypeRef {
+ /** Create type ref with given prefix and name (hash-consed). */
+ def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef =
+ ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef]
+
+ /** Create type ref to given symbol */
+ def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
+ withSymAndName(prefix, sym, sym.name)
+
+ /** Create a non-member type ref (which cannot be reloaded using `member`),
+ * with given prefix, name, and symbol.
+ */
+ def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
+ unique(new TypeRefWithFixedSym(prefix, name, sym))
+
+ /** Create a type ref referring to given symbol with given name.
+ * This is very similar to TypeRef(Type, Symbol),
+ * except for two differences:
+ * (1) The symbol might not yet have a denotation, so the name needs to be given explicitly.
+ * (2) The name in the type ref need not be the same as the name of the Symbol.
+ */
+ def withSymAndName(prefix: Type, sym: TypeSymbol, name: TypeName)(implicit ctx: Context): TypeRef =
+ if ((prefix eq NoPrefix) || sym.isFresh) withFixedSym(prefix, name, sym)
+ else apply(prefix, name).withSym(sym, Signature.NotAMethod)
+
+ /** Create a type ref with given name and initial denotation */
+ def apply(prefix: Type, name: TypeName, denot: Denotation)(implicit ctx: Context): TypeRef = {
+ if ((prefix eq NoPrefix) || denot.symbol.isFresh) apply(prefix, denot.symbol.asType)
+ else apply(prefix, name)
+ } withDenot denot
+ }
+
+ // --- Other SingletonTypes: ThisType/SuperType/ConstantType ---------------------------
+
+ /** The type cls.this
+ * @param tref A type ref which indicates the class `cls`.
+ * Note: we do not pass a class symbol directly, because symbols
+ * do not survive runs whereas typerefs do.
+ */
+ abstract case class ThisType(tref: TypeRef) extends CachedProxyType with SingletonType {
+ // Resolve the class through the tref; `stableInRunSymbol` keeps the lookup valid across runs.
+ def cls(implicit ctx: Context): ClassSymbol = tref.stableInRunSymbol.asClass
+ override def underlying(implicit ctx: Context): Type =
+ // After erasure the self type is no longer meaningful; fall back to the class ref itself.
+ if (ctx.erasedTypes) tref else cls.classInfo.selfType
+ override def computeHash = doHash(tref)
+ }
+
+ // Concrete hash-consed instance; created only through ThisType.raw below.
+ final class CachedThisType(tref: TypeRef) extends ThisType(tref)
+
+ object ThisType {
+ /** Normally one should use ClassSymbol#thisType instead */
+ def raw(tref: TypeRef)(implicit ctx: Context) =
+ unique(new CachedThisType(tref))
+ }
+
+ /** The type of a super reference cls.super where
+ * `thistpe` is cls.this and `supertpe` is the type of the value referenced
+ * by `super`.
+ */
+ abstract case class SuperType(thistpe: Type, supertpe: Type) extends CachedProxyType with SingletonType {
+ override def underlying(implicit ctx: Context) = supertpe
+ // Returns `this` unchanged when both components are reference-identical,
+ // avoiding a fresh allocation in the common no-op case.
+ def derivedSuperType(thistpe: Type, supertpe: Type)(implicit ctx: Context) =
+ if ((thistpe eq this.thistpe) && (supertpe eq this.supertpe)) this
+ else SuperType(thistpe, supertpe)
+ override def computeHash = doHash(thistpe, supertpe)
+ }
+
+ // Concrete hash-consed instance; created only through SuperType.apply below.
+ final class CachedSuperType(thistpe: Type, supertpe: Type) extends SuperType(thistpe, supertpe)
+
+ object SuperType {
+ def apply(thistpe: Type, supertpe: Type)(implicit ctx: Context): Type = {
+ // A super reference needs an actual `this` type to be meaningful.
+ assert(thistpe != NoPrefix)
+ unique(new CachedSuperType(thistpe, supertpe))
+ }
+ }
+
+ /** A constant type with single `value`. */
+ abstract case class ConstantType(value: Constant) extends CachedProxyType with SingletonType {
+ // The underlying type is the type of the constant itself (e.g. Int for 1).
+ override def underlying(implicit ctx: Context) = value.tpe
+ override def computeHash = doHash(value)
+ }
+
+ // Concrete hash-consed instance; created only through ConstantType.apply below.
+ final class CachedConstantType(value: Constant) extends ConstantType(value)
+
+ object ConstantType {
+ def apply(value: Constant)(implicit ctx: Context) = {
+ // Constant types should not be created after erasure.
+ assertUnerased()
+ unique(new CachedConstantType(value))
+ }
+ }
+
+ /** A lazily evaluated reference to a type, used to break cycles.
+ * `refFn` is run at most once; its result is cached in `myRef`.
+ */
+ case class LazyRef(refFn: () => Type) extends UncachedProxyType with ValueType {
+ private var myRef: Type = null
+ private var computed = false
+ def ref = {
+ if (computed) assert(myRef != null)
+ else {
+ // `computed` is set *before* running refFn, so a re-entrant access
+ // during evaluation sees computed == true with myRef still null
+ // (and would trip the assert above); `evaluating` detects that state.
+ computed = true
+ myRef = refFn()
+ }
+ myRef
+ }
+ // True exactly while refFn is still running (see ordering note above).
+ def evaluating = computed && myRef == null
+ override def underlying(implicit ctx: Context) = ref
+ override def toString = s"LazyRef($ref)"
+ // Note: equals/hashCode force the lazy reference.
+ override def equals(other: Any) = other match {
+ case other: LazyRef => this.ref.equals(other.ref)
+ case _ => false
+ }
+ override def hashCode = ref.hashCode + 37
+ }
+
+ // --- Refined Type and RecType ------------------------------------------------
+
+ /** Common base of RefinedType and RecType: a proxy type wrapping a `parent`. */
+ abstract class RefinedOrRecType extends CachedProxyType with ValueType {
+ def parent: Type
+ }
+
+ /** A refined type parent { refinement }
+ * @param refinedName The name of the refinement declaration
+ * @param infoFn: A function that produces the info of the refinement declaration,
+ * given the refined type itself.
+ */
+ abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType {
+
+ override def underlying(implicit ctx: Context) = parent
+
+ // Debug helper; not referenced in the code visible here.
+ private def badInst =
+ throw new AssertionError(s"bad instantiation: $this")
+
+ def checkInst(implicit ctx: Context): this.type = this // debug hook
+
+ // Returns `this` when all three components are reference-identical.
+ def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type =
+ if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this
+ else RefinedType(parent, refinedName, refinedInfo)
+
+ /** Add this refinement to `parent`, provided `refinedName` is a member of `parent`. */
+ def wrapIfMember(parent: Type)(implicit ctx: Context): Type =
+ if (parent.member(refinedName).exists) derivedRefinedType(parent, refinedName, refinedInfo)
+ else parent
+
+ // Structural equality over all three components; required for hash-consing.
+ override def equals(that: Any) = that match {
+ case that: RefinedType =>
+ this.parent == that.parent &&
+ this.refinedName == that.refinedName &&
+ this.refinedInfo == that.refinedInfo
+ case _ =>
+ false
+ }
+ override def computeHash = doHash(refinedName, refinedInfo, parent)
+ override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)"
+ }
+
+ // Instance whose hash `hc` is precomputed by the unique-table; computeHash
+ // must therefore never be called on it.
+ class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int)
+ extends RefinedType(parent, refinedName, refinedInfo) {
+ myHash = hc
+ override def computeHash = unsupported("computeHash")
+ }
+
+ object RefinedType {
+ /** Fold a parallel list of names and infos into nested refinements of `parent`. */
+ def make(parent: Type, names: List[Name], infos: List[Type])(implicit ctx: Context): Type =
+ if (names.isEmpty) parent
+ else make(RefinedType(parent, names.head, infos.head), names.tail, infos.tail)
+
+ def apply(parent: Type, name: Name, info: Type)(implicit ctx: Context): RefinedType = {
+ assert(!ctx.erasedTypes)
+ ctx.base.uniqueRefinedTypes.enterIfNew(parent, name, info).checkInst
+ }
+ }
+
+ /** A recursive type binder; `parent` may refer back to this binder via RecThis. */
+ class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType {
+
+ // See discussion in findMember#goRec why these vars are needed
+ private[Types] var opened: Boolean = false
+ private[Types] var openedTwice: Boolean = false
+
+ // parentExp closes over `this`, tying the recursive knot at construction.
+ val parent = parentExp(this)
+
+ override def underlying(implicit ctx: Context): Type = parent
+
+ // Re-binds self references in the new parent to the freshly created RecType.
+ def derivedRecType(parent: Type)(implicit ctx: Context): RecType =
+ if (parent eq this.parent) this
+ else RecType(rt => parent.substRecThis(this, RecThis(rt)))
+
+ // Like derivedRecType, but drops the binder if it turns out to be unused.
+ def rebind(parent: Type)(implicit ctx: Context): Type =
+ if (parent eq this.parent) this
+ else RecType.closeOver(rt => parent.substRecThis(this, RecThis(rt)))
+
+ override def equals(other: Any) = other match {
+ case other: RecType => other.parent == this.parent
+ case _ => false
+ }
+
+ /** Does `tp` contain a RecThis reference to this binder?
+ * Conservative: any LazyRef counts as a possible reference.
+ */
+ def isReferredToBy(tp: Type)(implicit ctx: Context): Boolean = {
+ val refacc = new TypeAccumulator[Boolean] {
+ override def apply(x: Boolean, tp: Type) = x || {
+ tp match {
+ case tp: TypeRef => apply(x, tp.prefix)
+ case tp: RecThis => RecType.this eq tp.binder
+ case tp: LazyRef => true // To be safe, assume a reference exists
+ case _ => foldOver(x, tp)
+ }
+ }
+ }
+ refacc.apply(false, tp)
+ }
+
+ override def computeHash = doHash(parent)
+ override def toString = s"RecType($parent | $hashCode)"
+
+ private def checkInst(implicit ctx: Context): this.type = this // debug hook
+ }
+
+ object RecType {
+
+ /** Create a RecType, normalizing its contents. This means:
+ *
+ * 1. Nested Rec types on the type's spine are merged with the outer one.
+ * 2. Any refinement of the form `type T = z.T` on the spine of the type
+ * where `z` refers to the created rec-type is replaced by
+ * `type T`. This avoids infinite recursions later when we
+ * try to follow these references.
+ * TODO: Figure out how to guarantee absence of cycles
+ * of length > 1
+ */
+ def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = {
+ val rt = new RecType(parentExp)
+ def normalize(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType =>
+ // Case 1: merge a nested RecType into `rt` by redirecting its self refs.
+ normalize(tp.parent.substRecThis(tp, RecThis(rt)))
+ case tp @ RefinedType(parent, rname, rinfo) =>
+ val rinfo1 = rinfo match {
+ // Case 2: a self-alias `type T = z.T` becomes an abstract `type T`.
+ case TypeAlias(TypeRef(RecThis(`rt`), `rname`)) => TypeBounds.empty
+ case _ => rinfo
+ }
+ tp.derivedRefinedType(normalize(parent), rname, rinfo1)
+ case tp =>
+ tp
+ }
+ unique(rt.derivedRecType(normalize(rt.parent))).checkInst
+ }
+ /** Create a RecType but return just the parent if it makes no self references. */
+ def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = {
+ val rt = this(parentExp)
+ if (rt.isReferredToBy(rt.parent)) rt else rt.parent
+ }
+ }
+
+ // --- AndType/OrType ---------------------------------------------------------------
+
+ /** Common interface of intersection (`&`) and union (`|`) types. */
+ trait AndOrType extends ValueType { // todo: check where we can simplify using AndOrType
+ def tp1: Type
+ def tp2: Type
+ def isAnd: Boolean
+ def derivedAndOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type // needed?
+ }
+
+ /** An intersection type `tp1 & tp2`. */
+ abstract case class AndType(tp1: Type, tp2: Type) extends CachedGroundType with AndOrType {
+
+ def isAnd = true
+
+ // Rebuild via AndType.make, which simplifies trivial combinations.
+ def derivedAndType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else AndType.make(tp1, tp2)
+
+ // Rebuild via the full `&` operation (which may simplify further than `make`).
+ def derived_& (tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else tp1 & tp2
+
+ def derivedAndOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ derivedAndType(tp1, tp2)
+
+ override def computeHash = doHash(tp1, tp2)
+ }
+
+ // Concrete hash-consed instance; created only through AndType.unchecked below.
+ final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2)
+
+ object AndType {
+ /** Create `tp1 & tp2`, asserting that both operands are value types. */
+ def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ assert(tp1.isValueType && tp2.isValueType, i"$tp1 & $tp2 / " + s"$tp1 & $tp2")
+ unchecked(tp1, tp2)
+ }
+ /** Like `apply`, but without the value-type check. */
+ def unchecked(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ assertUnerased()
+ unique(new CachedAndType(tp1, tp2))
+ }
+ /** Create `tp1 & tp2`, simplifying when one side is `Any` or both are identical. */
+ def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq tp2) || (tp2 eq defn.AnyType))
+ tp1
+ else if (tp1 eq defn.AnyType)
+ tp2
+ else
+ apply(tp1, tp2)
+ }
+
+ /** A union type `tp1 | tp2`. */
+ abstract case class OrType(tp1: Type, tp2: Type) extends CachedGroundType with AndOrType {
+
+ assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType])
+ def isAnd = false
+
+ // Cache for `join`, invalidated whenever the context period changes.
+ private[this] var myJoin: Type = _
+ private[this] var myJoinPeriod: Period = Nowhere
+
+ /** Replace or type by the closest non-or type above it */
+ def join(implicit ctx: Context): Type = {
+ if (myJoinPeriod != ctx.period) {
+ myJoin = ctx.orDominator(this)
+ core.println(i"join of $this == $myJoin")
+ assert(myJoin != this)
+ myJoinPeriod = ctx.period
+ }
+ myJoin
+ }
+
+ def derivedOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else OrType.make(tp1, tp2)
+
+ def derivedAndOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ derivedOrType(tp1, tp2)
+
+ override def computeHash = doHash(tp1, tp2)
+ }
+
+ // Concrete hash-consed instance; created only through OrType.apply below.
+ final class CachedOrType(tp1: Type, tp2: Type) extends OrType(tp1, tp2)
+
+ object OrType {
+ /** The unique hash-consed union type `tp1 | tp2`. */
+ def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ assertUnerased()
+ val orTp = new CachedOrType(tp1, tp2)
+ unique(orTp)
+ }
+ /** Like `apply`, but collapses to `tp1` when both sides are the same reference. */
+ def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if (tp1 ne tp2) apply(tp1, tp2) else tp1
+ }
+
+ // ----- Method types: MethodType/ExprType/PolyType -------------------------------
+
+ // Note: both method types and poly types are hash-consed via `unique`
+ // (see their companion objects below). Poly types are usually cyclic via
+ // their poly params, so two independently created poly types rarely
+ // compare equal and the cache is correspondingly less effective for them.
+
+ /** A trait that mixes in functionality for signature caching.
+ * The signature is recomputed at most once per run, and the cache is only
+ * committed when the computed signature is fully defined.
+ */
+ trait MethodicType extends TermType {
+
+ private[this] var mySignature: Signature = _
+ private[this] var mySignatureRunId: Int = NoRunId
+
+ protected def computeSignature(implicit ctx: Context): Signature
+
+ /** The signature of this type's result: delegated for methodic results,
+ * otherwise computed from the result type itself.
+ */
+ protected def resultSignature(implicit ctx: Context) = try resultType match {
+ case rtp: MethodicType => rtp.signature
+ case tp => Signature(tp, isJava = false)
+ }
+ catch {
+ case ex: AssertionError =>
+ // Fixed typo in diagnostic: "signture" -> "signature".
+ println(i"failure while taking result signature of $this: $resultType")
+ throw ex
+ }
+
+ final override def signature(implicit ctx: Context): Signature = {
+ if (ctx.runId != mySignatureRunId) {
+ mySignature = computeSignature
+ // Under-defined signatures may still change, so don't cache them.
+ if (!mySignature.isUnderDefined) mySignatureRunId = ctx.runId
+ }
+ mySignature
+ }
+ }
+
+ /** Marker trait shared by MethodType and PolyType. */
+ trait MethodOrPoly extends MethodicType
+
+ /** The type of a method with parameters `paramNames` of types `paramTypes`.
+ * `resultTypeExp` receives the method type itself so the result can refer
+ * back to parameters via MethodParam (dependent method types).
+ */
+ abstract case class MethodType(paramNames: List[TermName], paramTypes: List[Type])
+ (resultTypeExp: MethodType => Type)
+ extends CachedGroundType with BindingType with TermType with MethodOrPoly with NarrowCached { thisMethodType =>
+ import MethodType._
+
+ def isJava = false
+ def isImplicit = false
+
+ private[core] val resType = resultTypeExp(this)
+ assert(resType.exists)
+
+ // For FalseDeps the apparent parameter dependencies go through type aliases
+ // only, so they are eliminated here by dealiasing the result type.
+ override def resultType(implicit ctx: Context): Type =
+ if (dependencyStatus == FalseDeps) { // dealias all false dependencies
+ val dealiasMap = new TypeMap {
+ def apply(tp: Type) = tp match {
+ case tp @ TypeRef(pre, name) =>
+ tp.info match {
+ case TypeAlias(alias) if depStatus(pre) == TrueDeps => apply(alias)
+ case _ => mapOver(tp)
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ dealiasMap(resType)
+ }
+ else resType
+
+ // NOTE(review): unlike the other caches in this file this var is not
+ // `private[this]` / `private` — looks like an oversight; confirm no outside
+ // access before tightening.
+ var myDependencyStatus: DependencyStatus = Unknown
+
+ /** Compute the dependency status of `tp` with respect to this method's
+ * parameters; `Provisional` is or-ed in when uninstantiated type variables
+ * mean the answer could still change.
+ */
+ private def depStatus(tp: Type)(implicit ctx: Context): DependencyStatus = {
+ def combine(x: DependencyStatus, y: DependencyStatus) = {
+ val status = (x & StatusMask) max (y & StatusMask)
+ val provisional = (x | y) & Provisional
+ (if (status == TrueDeps) status else status | provisional).toByte
+ }
+ val depStatusAcc = new TypeAccumulator[DependencyStatus] {
+ def apply(status: DependencyStatus, tp: Type) =
+ if (status == TrueDeps) status
+ else
+ tp match {
+ case MethodParam(`thisMethodType`, _) => TrueDeps
+ case tp: TypeRef =>
+ val status1 = foldOver(status, tp)
+ tp.info match { // follow type alias to avoid dependency
+ case TypeAlias(alias) if status1 == TrueDeps && status != TrueDeps =>
+ combine(apply(status, alias), FalseDeps)
+ case _ =>
+ status1
+ }
+ case tp: TypeVar if !tp.isInstantiated => combine(status, Provisional)
+ case _ => foldOver(status, tp)
+ }
+ }
+ depStatusAcc(NoDeps, tp)
+ }
+
+ /** The dependency status of this method. Some examples:
+ *
+ * class C extends { type S; type T = String }
+ * def f(x: C)(y: Boolean) // dependencyStatus = NoDeps
+ * def f(x: C)(y: x.S) // dependencyStatus = TrueDeps
+ * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e.
+ * // dependency can be eliminated by dealiasing.
+ */
+ private def dependencyStatus(implicit ctx: Context): DependencyStatus = {
+ if (myDependencyStatus != Unknown) myDependencyStatus
+ else {
+ val result = depStatus(resType)
+ // Provisional results are not cached; they may change once type vars
+ // get instantiated.
+ if ((result & Provisional) == 0) myDependencyStatus = result
+ (result & StatusMask).toByte
+ }
+ }
+
+ /** Does result type contain references to parameters of this method type,
+ * which cannot be eliminated by de-aliasing?
+ */
+ def isDependent(implicit ctx: Context): Boolean = dependencyStatus == TrueDeps
+
+ protected def computeSignature(implicit ctx: Context): Signature =
+ resultSignature.prepend(paramTypes, isJava)
+
+ // Rebuild preserving the Java/implicit flavor of this method type.
+ def derivedMethodType(paramNames: List[TermName], paramTypes: List[Type], resType: Type)(implicit ctx: Context) =
+ if ((paramNames eq this.paramNames) && (paramTypes eq this.paramTypes) && (resType eq this.resType)) this
+ else {
+ val resTypeFn = (x: MethodType) => resType.subst(this, x)
+ if (isJava) JavaMethodType(paramNames, paramTypes)(resTypeFn)
+ else if (isImplicit) ImplicitMethodType(paramNames, paramTypes)(resTypeFn)
+ else MethodType(paramNames, paramTypes)(resTypeFn)
+ }
+
+ // `argTypes` is by-name: it is only forced when the result actually
+ // depends on the parameters.
+ def instantiate(argTypes: => List[Type])(implicit ctx: Context): Type =
+ if (isDependent) resultType.substParams(this, argTypes)
+ else resultType
+
+ override def equals(that: Any) = that match {
+ case that: MethodType =>
+ this.paramNames == that.paramNames &&
+ this.paramTypes == that.paramTypes &&
+ this.resType == that.resType
+ case _ =>
+ false
+ }
+
+ override def computeHash = doHash(paramNames, resType, paramTypes)
+
+ protected def prefixString = "MethodType"
+ override def toString = s"$prefixString($paramNames, $paramTypes, $resType)"
+ }
+
+ // Plain (non-Java, non-implicit) method type. The extra isInstanceOf check in
+ // equals keeps the three flavors distinct in the unique-table.
+ final class CachedMethodType(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)
+ extends MethodType(paramNames, paramTypes)(resultTypeExp) {
+ override def equals(that: Any) = super.equals(that) && that.isInstanceOf[CachedMethodType]
+ }
+
+ // Method type of a Java-defined method; hash is offset so it never collides
+ // with the other flavors of the same shape.
+ final class JavaMethodType(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)
+ extends MethodType(paramNames, paramTypes)(resultTypeExp) {
+ override def isJava = true
+ override def equals(that: Any) = super.equals(that) && that.isInstanceOf[JavaMethodType]
+ override def computeHash = addDelta(super.computeHash, 1)
+ override protected def prefixString = "JavaMethodType"
+ }
+
+ // Method type of an implicit parameter section; hash offset 2 (vs 1 for Java).
+ final class ImplicitMethodType(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)
+ extends MethodType(paramNames, paramTypes)(resultTypeExp) {
+ override def isImplicit = true
+ override def equals(that: Any) = super.equals(that) && that.isInstanceOf[ImplicitMethodType]
+ override def computeHash = addDelta(super.computeHash, 2)
+ override protected def prefixString = "ImplicitMethodType"
+ }
+
+ /** Shared constructors for the three method-type flavors; subclasses supply
+ * the primary `apply` that picks the concrete class.
+ */
+ abstract class MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context): MethodType
+ def apply(paramNames: List[TermName], paramTypes: List[Type], resultType: Type)(implicit ctx: Context): MethodType =
+ apply(paramNames, paramTypes)(_ => resultType)
+ // Convenience overloads that synthesize parameter names.
+ def apply(paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context): MethodType =
+ apply(nme.syntheticParamNames(paramTypes.length), paramTypes)(resultTypeExp)
+ def apply(paramTypes: List[Type], resultType: Type)(implicit ctx: Context): MethodType =
+ apply(nme.syntheticParamNames(paramTypes.length), paramTypes, resultType)
+
+ /** Produce method type from parameter symbols, with special mappings for repeated
+ * and inline parameters.
+ */
+ def fromSymbols(params: List[Symbol], resultType: Type)(implicit ctx: Context) = {
+ /** Replace @repeated annotations on Seq or Array types by <repeated> types */
+ def translateRepeated(tp: Type): Type = tp match {
+ case tp @ ExprType(tp1) => tp.derivedExprType(translateRepeated(tp1))
+ case AnnotatedType(tp, annot) if annot matches defn.RepeatedAnnot =>
+ val typeSym = tp.typeSymbol.asClass
+ assert(typeSym == defn.SeqClass || typeSym == defn.ArrayClass)
+ tp.translateParameterized(typeSym, defn.RepeatedParamClass)
+ case tp =>
+ tp
+ }
+ /** Add @inlineParam to inline call-by-value parameters */
+ def translateInline(tp: Type): Type = tp match {
+ case _: ExprType => tp
+ case _ => AnnotatedType(tp, Annotation(defn.InlineParamAnnot))
+ }
+ def paramInfo(param: Symbol): Type = {
+ val paramType = translateRepeated(param.info)
+ if (param.is(Inline)) translateInline(paramType) else paramType
+ }
+ // Rewire references to the parameter symbols into MethodParam refs of
+ // the method type under construction.
+ def transformResult(mt: MethodType) =
+ resultType.subst(params, (0 until params.length).toList map (MethodParam(mt, _)))
+ apply(params map (_.name.asTermName), params map paramInfo)(transformResult _)
+ }
+ }
+
+ object MethodType extends MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
+ unique(new CachedMethodType(paramNames, paramTypes)(resultTypeExp))
+
+ // Dependency-status encoding: low two bits are the status proper,
+ // bit 2 marks a provisional (not-yet-cachable) answer.
+ private type DependencyStatus = Byte
+ private final val Unknown: DependencyStatus = 0 // not yet computed
+ private final val NoDeps: DependencyStatus = 1 // no dependent parameters found
+ private final val FalseDeps: DependencyStatus = 2 // all dependent parameters are prefixes of non-depended alias types
+ private final val TrueDeps: DependencyStatus = 3 // some truly dependent parameters exist
+ private final val StatusMask: DependencyStatus = 3 // the bits indicating actual dependency status
+ private final val Provisional: DependencyStatus = 4 // set if dependency status can still change due to type variable instantiations
+ }
+
+ object JavaMethodType extends MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
+ unique(new JavaMethodType(paramNames, paramTypes)(resultTypeExp))
+ }
+
+ object ImplicitMethodType extends MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
+ unique(new ImplicitMethodType(paramNames, paramTypes)(resultTypeExp))
+ }
+
+ /** A by-name parameter type of the form `=> T`, or the type of a method with no parameter list. */
+ abstract case class ExprType(resType: Type)
+ extends CachedProxyType with TermType with MethodicType {
+ override def resultType(implicit ctx: Context): Type = resType
+ override def underlying(implicit ctx: Context): Type = resType
+ // No parameters to prepend, so the signature is just the result's.
+ protected def computeSignature(implicit ctx: Context): Signature = resultSignature
+ def derivedExprType(resType: Type)(implicit ctx: Context) =
+ if (resType eq this.resType) this else ExprType(resType)
+ override def computeHash = doHash(resType)
+ }
+
+ // Concrete hash-consed instance; created only through ExprType.apply below.
+ final class CachedExprType(resultType: Type) extends ExprType(resultType)
+
+ object ExprType {
+ def apply(resultType: Type)(implicit ctx: Context) = {
+ assertUnerased()
+ unique(new CachedExprType(resultType))
+ }
+ }
+
+ /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */
+ class PolyType(val paramNames: List[TypeName], val variances: List[Int])(
+ paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
+ extends CachedProxyType with BindingType with MethodOrPoly {
+
+ /** The bounds of the type parameters */
+ val paramBounds: List[TypeBounds] = paramBoundsExp(this)
+
+ /** The result type of a PolyType / body of a type lambda */
+ val resType: Type = resultTypeExp(this)
+
+ assert(resType.isInstanceOf[TermType], this)
+ assert(paramNames.nonEmpty)
+
+ protected def computeSignature(implicit ctx: Context) = resultSignature
+
+ def isPolymorphicMethodType: Boolean = resType match {
+ case _: MethodType => true
+ case _ => false
+ }
+
+ /** Is this polytype a higher-kinded type lambda, as opposed to a polymorphic
+ * method type? Only type lambdas get created with variances, that's how we can tell.
+ */
+ def isTypeLambda: Boolean = variances.nonEmpty
+
+ /** PolyParam references to all type parameters of this type */
+ lazy val paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _))
+
+ lazy val typeParams: List[LambdaParam] =
+ paramNames.indices.toList.map(new LambdaParam(this, _))
+
+ override def resultType(implicit ctx: Context) = resType
+ override def underlying(implicit ctx: Context) = resType
+
+ /** Instantiate result type by substituting parameters with given arguments */
+ final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type =
+ resultType.substParams(this, argTypes)
+
+ /** Instantiate parameter bounds by substituting parameters with given arguments */
+ final def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] =
+ paramBounds.mapConserve(_.substParams(this, argTypes).bounds)
+
+ // A fresh PolyType with self references re-bound to the new binder;
+ // variances are carried over unchanged.
+ def newLikeThis(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): PolyType =
+ PolyType.apply(paramNames, variances)(
+ x => paramBounds mapConserve (_.subst(this, x).bounds),
+ x => resType.subst(this, x))
+
+ def derivedPolyType(paramNames: List[TypeName] = this.paramNames,
+ paramBounds: List[TypeBounds] = this.paramBounds,
+ resType: Type = this.resType)(implicit ctx: Context) =
+ if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this
+ else newLikeThis(paramNames, paramBounds, resType)
+
+ // When the result is an alias or bounds, push the lambda inside them.
+ def derivedLambdaAbstraction(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type =
+ resType match {
+ case resType @ TypeAlias(alias) =>
+ resType.derivedTypeAlias(newLikeThis(paramNames, paramBounds, alias))
+ case resType @ TypeBounds(lo, hi) =>
+ resType.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else newLikeThis(paramNames, paramBounds, lo),
+ newLikeThis(paramNames, paramBounds, hi))
+ case _ =>
+ derivedPolyType(paramNames, paramBounds, resType)
+ }
+
+ /** Merge nested polytypes into one polytype. nested polytypes are normally not supported
+ * but can arise as temporary data structures.
+ */
+ def flatten(implicit ctx: Context): PolyType = resType match {
+ case that: PolyType =>
+ // Shift the inner polytype's param indices past this one's params.
+ val shift = new TypeMap {
+ def apply(t: Type) = t match {
+ case PolyParam(`that`, n) => PolyParam(that, n + paramNames.length)
+ case t => mapOver(t)
+ }
+ }
+ PolyType(paramNames ++ that.paramNames)(
+ x => this.paramBounds.mapConserve(_.subst(this, x).bounds) ++
+ that.paramBounds.mapConserve(shift(_).subst(that, x).bounds),
+ x => shift(that.resultType).subst(that, x).subst(this, x))
+ case _ => this
+ }
+
+ /** The type `[tparams := paramRefs] tp`, where `tparams` can be
+ * either a list of type parameter symbols or a list of lambda parameters
+ */
+ def lifted(tparams: List[TypeParamInfo], tp: Type)(implicit ctx: Context): Type =
+ tparams match {
+ case LambdaParam(poly, _) :: _ => tp.subst(poly, this)
+ case tparams: List[Symbol @unchecked] => tp.subst(tparams, paramRefs)
+ }
+
+ // Structural equality (including variances); used by the unique-table.
+ override def equals(other: Any) = other match {
+ case other: PolyType =>
+ other.paramNames == this.paramNames &&
+ other.paramBounds == this.paramBounds &&
+ other.resType == this.resType &&
+ other.variances == this.variances
+ case _ => false
+ }
+
+ override def toString = s"PolyType($variances, $paramNames, $paramBounds, $resType)"
+
+ override def computeHash = doHash(variances ::: paramNames, resType, paramBounds)
+ }
+
+ object PolyType {
+ /** Create a hash-consed PolyType; an empty `variances` list is padded
+ * with zeroes (one per parameter).
+ */
+ def apply(paramNames: List[TypeName], variances: List[Int] = Nil)(
+ paramBoundsExp: PolyType => List[TypeBounds],
+ resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = {
+ val vs = if (variances.isEmpty) paramNames.map(alwaysZero) else variances
+ unique(new PolyType(paramNames, vs)(paramBoundsExp, resultTypeExp))
+ }
+
+ def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] =
+ Some((tl.typeParams, tl.resType))
+
+ /** A maximally general polytype: `n` invariant unbounded params, result Any. */
+ def any(n: Int)(implicit ctx: Context) =
+ apply(tpnme.syntheticTypeParamNames(n), List.fill(n)(0))(
+ pt => List.fill(n)(TypeBounds.empty), pt => defn.AnyType)
+ }
+
+ // ----- HK types: LambdaParam, HKApply ---------------------
+
+ /** The parameter of a type lambda: the `n`-th parameter of polytype `tl`.
+ * All accessors delegate to the binder's parallel lists.
+ */
+ case class LambdaParam(tl: PolyType, n: Int) extends TypeParamInfo {
+ def isTypeParam(implicit ctx: Context) = true
+ def paramName(implicit ctx: Context): TypeName = tl.paramNames(n)
+ def paramBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n)
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = paramBounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = paramBounds
+ def paramVariance(implicit ctx: Context): Int = tl.variances(n)
+ def toArg: Type = PolyParam(tl, n)
+ def paramRef(implicit ctx: Context): Type = PolyParam(tl, n)
+ }
+
+ /** A higher kinded type application `C[T_1, ..., T_n]` */
+ abstract case class HKApply(tycon: Type, args: List[Type])
+ extends CachedProxyType with ValueType {
+
+ // Period-keyed cache for superType.
+ private var validSuper: Period = Nowhere
+ private var cachedSuper: Type = _
+
+ override def underlying(implicit ctx: Context): Type = tycon
+
+ override def superType(implicit ctx: Context): Type = {
+ if (ctx.period != validSuper) {
+ cachedSuper = tycon match {
+ case tp: PolyType => defn.AnyType
+ case tp: TypeVar if !tp.inst.exists =>
+ // supertype not stable, since underlying might change
+ // (early return: deliberately skips updating the cache).
+ return tp.underlying.applyIfParameterized(args)
+ case tp: TypeProxy => tp.superType.applyIfParameterized(args)
+ case _ => defn.AnyType
+ }
+ validSuper = ctx.period
+ }
+ cachedSuper
+ }
+
+ /** The lower bound of this application, if the constructor is abstract
+ * with type bounds; NoType otherwise.
+ */
+ def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match {
+ case tycon: TypeRef =>
+ tycon.info match {
+ case TypeBounds(lo, hi) =>
+ if (lo eq hi) superType // optimization, can profit from caching in this case
+ else lo.applyIfParameterized(args)
+ case _ => NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ // Fall back to synthetic params when the constructor declares none.
+ def typeParams(implicit ctx: Context): List[TypeParamInfo] = {
+ val tparams = tycon.typeParams
+ if (tparams.isEmpty) PolyType.any(args.length).typeParams else tparams
+ }
+
+ def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type =
+ if ((tycon eq this.tycon) && (args eq this.args)) this
+ else tycon.appliedTo(args)
+
+ override def computeHash = doHash(tycon, args)
+
+ /** Debug check (enabled by Config.checkHKApplications) that the type
+ * constructor is of a legal form.
+ */
+ protected def checkInst(implicit ctx: Context): this.type = {
+ def check(tycon: Type): Unit = tycon.stripTypeVar match {
+ case tycon: TypeRef if !tycon.symbol.isClass =>
+ case _: PolyParam | ErrorType | _: WildcardType =>
+ case _: PolyType =>
+ assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this")
+ case tycon: AnnotatedType =>
+ check(tycon.underlying)
+ case _ =>
+ assert(false, s"illegal type constructor in $this")
+ }
+ if (Config.checkHKApplications) check(tycon)
+ this
+ }
+ }
+
+ // Concrete hash-consed instance; created only through HKApply.apply below.
+ final class CachedHKApply(tycon: Type, args: List[Type]) extends HKApply(tycon, args)
+
+ object HKApply {
+ def apply(tycon: Type, args: List[Type])(implicit ctx: Context) =
+ unique(new CachedHKApply(tycon, args)).checkInst
+ }
+
+ // ----- Bound types: MethodParam, PolyParam --------------------------
+
+ /** A type bound by an enclosing binder `binder` of type `BT`. */
+ abstract class BoundType extends CachedProxyType with ValueType {
+ type BT <: Type
+ def binder: BT
+ // Dotty deviation: copyBoundType was copy, but
+ // dotty generates copy methods always automatically, and therefore
+ // does not accept same-named method definitions in subclasses.
+ // Scala2x, on the other hand, requires them (not sure why!)
+ def copyBoundType(bt: BT): Type
+ }
+
+ /** A bound type that refers to the `paramNum`-th parameter of its binder. */
+ abstract class ParamType extends BoundType {
+ def paramNum: Int
+ def paramName: Name
+ }
+
+ /** A reference to the `paramNum`-th term parameter of method type `binder`. */
+ abstract case class MethodParam(binder: MethodType, paramNum: Int) extends ParamType with SingletonType {
+ type BT = MethodType
+
+ def paramName = binder.paramNames(paramNum)
+
+ override def underlying(implicit ctx: Context): Type = binder.paramTypes(paramNum)
+ def copyBoundType(bt: BT) = new MethodParamImpl(bt, paramNum)
+
+ // need to customize hashCode and equals to prevent infinite recursion for dep meth types.
+ // Identity of the binder (not its structure) breaks the cycle.
+ override def computeHash = addDelta(binder.identityHash, paramNum)
+ override def equals(that: Any) = that match {
+ case that: MethodParam =>
+ (this.binder eq that.binder) && this.paramNum == that.paramNum
+ case _ =>
+ false
+ }
+
+ override def toString = s"MethodParam($paramName)"
+ }
+
+ // Concrete instance; note MethodParams are NOT hash-consed (see apply below).
+ class MethodParamImpl(binder: MethodType, paramNum: Int) extends MethodParam(binder, paramNum)
+
+ object MethodParam {
+ def apply(binder: MethodType, paramNum: Int)(implicit ctx: Context): MethodParam = {
+ assertUnerased()
+ new MethodParamImpl(binder, paramNum)
+ }
+ }
+
+ /** A reference to the `paramNum`-th type parameter of polytype `binder`. */
+ case class PolyParam(binder: PolyType, paramNum: Int) extends ParamType {
+ type BT = PolyType
+ def copyBoundType(bt: BT) = PolyParam(bt, paramNum)
+
+ /** Looking only at the structure of `bound`, is one of the following true?
+ * - fromBelow and param <:< bound
+ * - !fromBelow and param >:> bound
+ */
+ def occursIn(bound: Type, fromBelow: Boolean)(implicit ctx: Context): Boolean = bound.stripTypeVar match {
+ case bound: PolyParam => bound == this
+ case bound: AndOrType =>
+ def occ1 = occursIn(bound.tp1, fromBelow)
+ def occ2 = occursIn(bound.tp2, fromBelow)
+ if (fromBelow == bound.isAnd) occ1 && occ2 else occ1 || occ2
+ case _ => false
+ }
+
+ def paramName = binder.paramNames(paramNum)
+
+ override def underlying(implicit ctx: Context): Type = {
+ val bounds = binder.paramBounds
+ if (bounds == null) NoType // this can happen if the referenced generic type is not initialized yet
+ else bounds(paramNum)
+ }
+ // no customized hashCode/equals needed because cycle is broken in PolyType
+ override def toString =
+ try s"PolyParam($paramName)"
+ catch {
+ case ex: IndexOutOfBoundsException => s"PolyParam(<bad index: $paramNum>)"
+ }
+
+ override def computeHash = doHash(paramNum, binder.identityHash)
+
+ override def equals(that: Any) = that match {
+ case that: PolyParam =>
+ (this.binder eq that.binder) && this.paramNum == that.paramNum
+ case _ =>
+ false
+ }
+ }
+
+ /** A self-reference to an enclosing recursive type. */
+ case class RecThis(binder: RecType) extends BoundType with SingletonType {
+ type BT = RecType
+ override def underlying(implicit ctx: Context) = binder
+ def copyBoundType(bt: BT) = RecThis(bt)
+
+ // need to customize hashCode and equals to prevent infinite recursion
+ // between RecTypes and RecRefs.
+ override def computeHash = addDelta(binder.identityHash, 41)
+ override def equals(that: Any) = that match {
+ case that: RecThis => this.binder eq that.binder
+ case _ => false
+ }
+ override def toString =
+ try s"RecThis(${binder.hashCode})"
+ catch {
+ // binder may be null while the enclosing RecType is being constructed
+ case ex: NullPointerException => s"RecThis(<under construction>)"
+ }
+ }
+
+ // ----- Skolem types -----------------------------------------------
+
+ /** A skolem type reference with underlying type `info`.
+ * Skolems compare by identity only (see hashCode/equals below).
+ */
+ abstract case class SkolemType(info: Type) extends UncachedProxyType with ValueType with SingletonType {
+ override def underlying(implicit ctx: Context) = info
+ def derivedSkolemType(info: Type)(implicit ctx: Context) =
+ if (info eq this.info) this else SkolemType(info)
+ override def hashCode: Int = identityHash
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ // Lazily-assigned display name, e.g. for error messages; stable once computed.
+ private var myRepr: String = null
+ def repr(implicit ctx: Context) = {
+ if (myRepr == null) myRepr = ctx.freshName("?")
+ myRepr
+ }
+
+ override def toString = s"Skolem($hashCode)"
+ }
+
+ final class CachedSkolemType(info: Type) extends SkolemType(info)
+
+ object SkolemType {
+ def apply(info: Type)(implicit ctx: Context) =
+ unique(new CachedSkolemType(info))
+ }
+
+ // ------------ Type variables ----------------------------------------
+
+ /** In a TypeApply tree, a TypeVar is created for each argument type to be inferred.
+ * Every type variable is referred to by exactly one inferred type parameter of some
+ * TypeApply tree.
+ *
+ * A type variable is essentially a switch that models some part of a substitution.
+ * It is first linked to `origin`, a poly param that's in the current constraint set.
+ * It can then be (once) instantiated to some other type. The instantiation is
+ * recorded in the type variable itself, or else, if the current type state
+ * is different from the variable's creation state (meaning unrolls are possible)
+ * in the current typer state.
+ *
+ * @param origin The parameter that's tracked by the type variable.
+ * @param creatorState The typer state in which the variable was created.
+ * @param owningTree The function part of the TypeApply tree that introduces
+ * the type variable.
+ * @param owner The current owner of the context where the variable was created.
+ *
+ * `owningTree` and `owner` are used to determine whether a type-variable can be instantiated
+ * at some given point. See `Inferencing#interpolateUndetVars`.
+ */
+ final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree, val owner: Symbol) extends CachedProxyType with ValueType {
+
+ /** The permanent instance type of the variable, or NoType if none is given yet */
+ private[core] var inst: Type = NoType
+
+ /** The state owning the variable. This is at first `creatorState`, but it can
+ * be changed to an enclosing state on a commit.
+ */
+ private[core] var owningState = creatorState
+
+ /** The instance type of this variable, or NoType if the variable is currently
+ * uninstantiated
+ */
+ def instanceOpt(implicit ctx: Context): Type =
+ if (inst.exists) inst else {
+ // reading the typer state makes the result context-dependent; mark it
+ ctx.typerState.ephemeral = true
+ ctx.typerState.instType(this)
+ }
+
+ /** Is the variable already instantiated? */
+ def isInstantiated(implicit ctx: Context) = instanceOpt.exists
+
+ /** Instantiate variable with given type */
+ private def instantiateWith(tp: Type)(implicit ctx: Context): Type = {
+ assert(tp ne this, s"self instantiation of ${tp.show}, constraint = ${ctx.typerState.constraint.show}")
+ typr.println(s"instantiating ${this.show} with ${tp.show}")
+ assert(ctx.typerState.constraint contains this) // !!! DEBUG
+ // only record the instance permanently if we are in the owning state
+ // and not in the middle of a subtype check
+ if ((ctx.typerState eq owningState) && !ctx.typeComparer.subtypeCheckInProgress)
+ inst = tp
+ ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp)
+ tp
+ }
+
+ /** Instantiate variable from the constraints over its `origin`.
+ * If `fromBelow` is true, the variable is instantiated to the lub
+ * of its lower bounds in the current constraint; otherwise it is
+ * instantiated to the glb of its upper bounds. However, a lower bound
+ * instantiation can be a singleton type only if the upper bound
+ * is also a singleton type.
+ */
+ def instantiate(fromBelow: Boolean)(implicit ctx: Context): Type = {
+ val inst = ctx.typeComparer.instanceType(origin, fromBelow)
+ if (ctx.typerState.isGlobalCommittable)
+ inst match {
+ case inst: PolyParam =>
+ assert(inst.binder.isTypeLambda, i"bad inst $this := $inst, constr = ${ctx.typerState.constraint}")
+ // If this fails, you might want to turn on Config.debugCheckConstraintsClosed
+ // to help find the root of the problem.
+ // Note: Parameters of type lambdas are excluded from the assertion because
+ // they might arise from ill-kinded code. See #1652
+ case _ =>
+ }
+ instantiateWith(inst)
+ }
+
+ /** Unwrap to instance (if instantiated) or origin (if not), until result
+ * is no longer a TypeVar
+ */
+ override def stripTypeVar(implicit ctx: Context): Type = {
+ val inst = instanceOpt
+ if (inst.exists) inst.stripTypeVar else origin
+ }
+
+ /** If the variable is instantiated, its instance, otherwise its origin */
+ override def underlying(implicit ctx: Context): Type = {
+ val inst = instanceOpt
+ if (inst.exists) inst
+ else {
+ ctx.typerState.ephemeral = true
+ origin
+ }
+ }
+
+ // Type variables compare by identity only.
+ override def computeHash: Int = identityHash
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ override def toString = {
+ def instStr = if (inst.exists) s" -> $inst" else ""
+ s"TypeVar($origin$instStr)"
+ }
+ }
+
+ // ------ ClassInfo, Type Bounds ------------------------------------------------------------
+
+ /** Roughly: the info of a class during a period.
+ * @param prefix The prefix on which parents, decls, and selfType need to be rebased.
+ * @param cls The class symbol.
+ * @param classParents The parent types of this class.
+ * These are all normalized to be TypeRefs by moving any refinements
+ * to be member definitions of the class itself.
+ * @param decls The symbols defined directly in this class.
+ * @param selfInfo The type of `this` in this class, if explicitly given,
+ * NoType otherwise. If class is compiled from source, can also
+ * be a reference to the self symbol containing the type.
+ */
+ abstract case class ClassInfo(
+ prefix: Type,
+ cls: ClassSymbol,
+ classParents: List[TypeRef],
+ decls: Scope,
+ selfInfo: DotClass /* should be: Type | Symbol */) extends CachedGroundType with TypeType {
+
+ /** The self type of a class is the conjunction of
+ * - the explicit self type if given (or the info of a given self symbol), and
+ * - the fully applied reference to the class itself.
+ */
+ def selfType(implicit ctx: Context): Type = {
+ if (selfTypeCache == null)
+ selfTypeCache = {
+ def fullRef = fullyAppliedRef
+ val given = givenSelfType
+ val raw =
+ if (!given.exists) fullRef
+ else if (cls is Module) given
+ else if (ctx.erasedTypes) fullRef
+ else AndType(given, fullRef)
+ raw//.asSeenFrom(prefix, cls.owner)
+ }
+ selfTypeCache
+ }
+
+ /** The explicitly given self type (self types of modules are assumed to be
+ * explicitly given here).
+ */
+ override def givenSelfType(implicit ctx: Context): Type = selfInfo match {
+ case tp: Type => tp
+ case self: Symbol => self.info
+ }
+
+ private var selfTypeCache: Type = null
+
+ // Helper for fullyAppliedRef below: refine `base` with a binding for each
+ // remaining type parameter, referring back to the parameter via the class' this-type.
+ private def fullyAppliedRef(base: Type, tparams: List[TypeSymbol])(implicit ctx: Context): Type = tparams match {
+ case tparam :: tparams1 =>
+ fullyAppliedRef(
+ RefinedType(base, tparam.name, TypeRef(cls.thisType, tparam).toBounds(tparam)),
+ tparams1)
+ case nil =>
+ base
+ }
+
+ /** The class type with all type parameters */
+ def fullyAppliedRef(implicit ctx: Context): Type = fullyAppliedRef(cls.typeRef, cls.typeParams)
+
+ private var typeRefCache: TypeRef = null
+
+ def typeRef(implicit ctx: Context): TypeRef = {
+ def clsDenot = if (prefix eq cls.owner.thisType) cls.denot else cls.denot.copySymDenotation(info = this)
+ if (typeRefCache == null)
+ typeRefCache =
+ if ((cls is PackageClass) || cls.owner.isTerm) symbolicTypeRef
+ else TypeRef(prefix, cls.name, clsDenot)
+ typeRefCache
+ }
+
+ def symbolicTypeRef(implicit ctx: Context): TypeRef = TypeRef(prefix, cls)
+
+ // cached because baseType needs parents
+ private var parentsCache: List[TypeRef] = null
+
+ /** The parent type refs as seen from the given prefix */
+ override def parents(implicit ctx: Context): List[TypeRef] = {
+ if (parentsCache == null)
+ parentsCache = cls.classParents.mapConserve(_.asSeenFrom(prefix, cls.owner).asInstanceOf[TypeRef])
+ parentsCache
+ }
+
+ /** The parent types with all type arguments */
+ override def parentsWithArgs(implicit ctx: Context): List[Type] =
+ parents mapConserve { pref =>
+ ((pref: Type) /: pref.classSymbol.typeParams) { (parent, tparam) =>
+ val targSym = decls.lookup(tparam.name)
+ if (targSym.exists) RefinedType(parent, targSym.name, targSym.info)
+ else parent
+ }
+ }
+
+ def derivedClassInfo(prefix: Type)(implicit ctx: Context) =
+ if (prefix eq this.prefix) this
+ else ClassInfo(prefix, cls, classParents, decls, selfInfo)
+
+ def derivedClassInfo(prefix: Type = this.prefix, classParents: List[TypeRef] = classParents, decls: Scope = this.decls, selfInfo: DotClass = this.selfInfo)(implicit ctx: Context) =
+ if ((prefix eq this.prefix) && (classParents eq this.classParents) && (decls eq this.decls) && (selfInfo eq this.selfInfo)) this
+ else ClassInfo(prefix, cls, classParents, decls, selfInfo)
+
+ override def computeHash = doHash(cls, prefix)
+
+ override def toString = s"ClassInfo($prefix, $cls)"
+ }
+
+ // Concrete cached implementation; created through ClassInfo.apply below.
+ class CachedClassInfo(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass)
+ extends ClassInfo(prefix, cls, classParents, decls, selfInfo)
+
+ /** A class for temporary class infos where `parents` are not yet known. */
+ final class TempClassInfo(prefix: Type, cls: ClassSymbol, decls: Scope, selfInfo: DotClass)
+ extends CachedClassInfo(prefix, cls, Nil, decls, selfInfo) {
+
+ /** A list of actions that were suspended because they rely on the class info of `cls` to
+ * be no longer temporary. These actions will be performed once `cls` gets a real
+ * ClassInfo.
+ */
+ private var suspensions: List[() => Unit] = Nil
+
+ def addSuspension(suspension: () => Unit): Unit = suspensions ::= suspension
+
+ /** Install classinfo with known parents in `denot` and resume all suspensions */
+ def finalize(denot: SymDenotation, parents: List[TypeRef])(implicit ctx: Context) = {
+ denot.info = derivedClassInfo(classParents = parents)
+ suspensions.foreach(_())
+ }
+ }
+
+ object ClassInfo {
+ def apply(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass = NoType)(implicit ctx: Context) =
+ unique(new CachedClassInfo(prefix, cls, classParents, decls, selfInfo))
+ }
+
+ /** Type bounds >: lo <: hi */
+ abstract case class TypeBounds(lo: Type, hi: Type) extends CachedProxyType with TypeType {
+
+ assert(lo.isInstanceOf[TermType])
+ assert(hi.isInstanceOf[TermType])
+
+ // Overridden by TypeAlias; 0 means "not an alias" here.
+ def variance: Int = 0
+
+ override def underlying(implicit ctx: Context): Type = hi
+
+ /** The non-alias type bounds type with given bounds */
+ def derivedTypeBounds(lo: Type, hi: Type)(implicit ctx: Context) =
+ if ((lo eq this.lo) && (hi eq this.hi) && (variance == 0)) this
+ else TypeBounds(lo, hi)
+
+ /** If this is an alias, a derived alias with the new variance,
+ * Otherwise the type itself.
+ */
+ def withVariance(variance: Int)(implicit ctx: Context) = this match {
+ case tp: TypeAlias => tp.derivedTypeAlias(tp.alias, variance)
+ case _ => this
+ }
+
+ /** Is `tp` (as bounds or as a single type) within these bounds? */
+ def contains(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp: TypeBounds => lo <:< tp.lo && tp.hi <:< hi
+ case tp: ClassInfo =>
+ // Note: Taking a normal typeRef does not work here. A normal ref might contain
+ // also other information about the named type (e.g. bounds).
+ contains(tp.symbolicTypeRef)
+ case _ => lo <:< tp && tp <:< hi
+ }
+
+ /** Greatest lower bound of two bounds: narrows the interval. */
+ def & (that: TypeBounds)(implicit ctx: Context): TypeBounds =
+ if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) that
+ else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) this
+ else TypeBounds(this.lo | that.lo, this.hi & that.hi)
+
+ /** Least upper bound of two bounds: widens the interval. */
+ def | (that: TypeBounds)(implicit ctx: Context): TypeBounds =
+ if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) this
+ else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) that
+ else TypeBounds(this.lo & that.lo, this.hi | that.hi)
+
+ override def & (that: Type)(implicit ctx: Context) = that match {
+ case that: TypeBounds => this & that
+ case _ => super.& (that)
+ }
+
+ override def | (that: Type)(implicit ctx: Context) = that match {
+ case that: TypeBounds => this | that
+ case _ => super.| (that)
+ }
+
+ /** The implied bounds, where aliases are mapped to intervals from
+ * Nothing/Any
+ */
+ def boundsInterval(implicit ctx: Context): TypeBounds = this
+
+ /** If this type and that type have the same variance, this variance, otherwise 0 */
+ final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2
+
+ override def computeHash = doHash(variance, lo, hi)
+ // Reference (eq) comparison of bounds plus variance; relies on hash-consing.
+ override def equals(that: Any): Boolean = that match {
+ case that: TypeBounds =>
+ (this.lo eq that.lo) && (this.hi eq that.hi) && (this.variance == that.variance)
+ case _ =>
+ false
+ }
+
+ override def toString =
+ if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)"
+ }
+
+ // Concrete non-alias bounds; created through TypeBounds.apply below.
+ class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
+
+ /** An alias `= alias`, encoded as bounds with `lo eq hi`, carrying a variance. */
+ abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) {
+ /** pre: this is a type alias */
+ def derivedTypeAlias(alias: Type, variance: Int = this.variance)(implicit ctx: Context) =
+ if ((alias eq this.alias) && (variance == this.variance)) this
+ else TypeAlias(alias, variance)
+
+ override def & (that: TypeBounds)(implicit ctx: Context): TypeBounds = {
+ val v = this commonVariance that
+ if (v > 0) derivedTypeAlias(this.hi & that.hi, v)
+ else if (v < 0) derivedTypeAlias(this.lo | that.lo, v)
+ else super.& (that)
+ }
+
+ override def | (that: TypeBounds)(implicit ctx: Context): TypeBounds = {
+ val v = this commonVariance that
+ if (v > 0) derivedTypeAlias(this.hi | that.hi, v)
+ else if (v < 0) derivedTypeAlias(this.lo & that.lo, v)
+ else super.| (that)
+ }
+
+ override def boundsInterval(implicit ctx: Context): TypeBounds =
+ if (variance == 0) this
+ else if (variance < 0) TypeBounds.lower(alias)
+ else TypeBounds.upper(alias)
+ }
+
+ // Cached alias; the hash is precomputed by the uniques table (see TypeAlias.apply).
+ class CachedTypeAlias(alias: Type, variance: Int, hc: Int) extends TypeAlias(alias, variance) {
+ myHash = hc
+ }
+
+ object TypeBounds {
+ def apply(lo: Type, hi: Type)(implicit ctx: Context): TypeBounds =
+ unique(new RealTypeBounds(lo, hi))
+ def empty(implicit ctx: Context) = apply(defn.NothingType, defn.AnyType)
+ def upper(hi: Type)(implicit ctx: Context) = apply(defn.NothingType, hi)
+ def lower(lo: Type)(implicit ctx: Context) = apply(lo, defn.AnyType)
+ }
+
+ object TypeAlias {
+ def apply(alias: Type, variance: Int = 0)(implicit ctx: Context) =
+ ctx.uniqueTypeAliases.enterIfNew(alias, variance)
+ def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias)
+ }
+
+ // ----- Annotated and Import types -----------------------------------------------
+
+ /** An annotated type tpe @ annot */
+ case class AnnotatedType(tpe: Type, annot: Annotation)
+ extends UncachedProxyType with ValueType {
+ // todo: cache them? but this makes only sense if annotations and trees are also cached.
+ override def underlying(implicit ctx: Context): Type = tpe
+ def derivedAnnotatedType(tpe: Type, annot: Annotation) =
+ if ((tpe eq this.tpe) && (annot eq this.annot)) this
+ else AnnotatedType(tpe, annot)
+
+ override def stripTypeVar(implicit ctx: Context): Type =
+ derivedAnnotatedType(tpe.stripTypeVar, annot)
+ override def stripAnnots(implicit ctx: Context): Type = tpe.stripAnnots
+ }
+
+ object AnnotatedType {
+ /** Wrap `underlying` in one AnnotatedType layer per annotation, innermost first. */
+ def make(underlying: Type, annots: List[Annotation]) =
+ (underlying /: annots)(AnnotatedType(_, _))
+ }
+
+ // Special type objects and classes -----------------------------------------------------
+
+ /** The type of an erased array */
+ abstract case class JavaArrayType(elemType: Type) extends CachedGroundType with ValueType {
+ override def computeHash = doHash(elemType)
+ def derivedJavaArrayType(elemtp: Type)(implicit ctx: Context) =
+ if (elemtp eq this.elemType) this else JavaArrayType(elemtp)
+ }
+ final class CachedJavaArrayType(elemType: Type) extends JavaArrayType(elemType)
+ object JavaArrayType {
+ def apply(elemType: Type)(implicit ctx: Context) = unique(new CachedJavaArrayType(elemType))
+ }
+
+ /** The type of an import clause tree */
+ case class ImportType(expr: Tree) extends UncachedGroundType
+
+ /** Sentinel for "missing type" */
+ @sharable case object NoType extends CachedGroundType {
+ override def exists = false
+ override def computeHash = hashSeed
+ }
+
+ /** Missing prefix */
+ @sharable case object NoPrefix extends CachedGroundType {
+ override def computeHash = hashSeed
+ }
+
+ /** Base class for types signalling an error. */
+ abstract class ErrorType extends UncachedGroundType with ValueType
+
+ object ErrorType extends ErrorType
+
+ /* Type used to track Select nodes that could not resolve a member and their qualifier is a scala.Dynamic. */
+ object TryDynamicCallType extends ErrorType
+
+ /** Wildcard type, possibly with bounds */
+ abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType {
+ def derivedWildcardType(optBounds: Type)(implicit ctx: Context) =
+ if (optBounds eq this.optBounds) this
+ else if (!optBounds.exists) WildcardType
+ else WildcardType(optBounds.asInstanceOf[TypeBounds])
+ override def computeHash = doHash(optBounds)
+ }
+
+ final class CachedWildcardType(optBounds: Type) extends WildcardType(optBounds)
+
+ // The companion object doubles as the unbounded wildcard (optBounds = NoType).
+ @sharable object WildcardType extends WildcardType(NoType) {
+ def apply(bounds: TypeBounds)(implicit ctx: Context) = unique(new CachedWildcardType(bounds))
+ }
+
+ /** An extractor for single abstract method types.
+ * A type is a SAM type if it is a reference to a class or trait, which
+ *
+ * - has a single abstract method with a method type (ExprType
+ * and PolyType not allowed!)
+ * - can be instantiated without arguments or with just () as argument.
+ *
+ * The pattern `SAMType(denot)` matches a SAM type, where `denot` is the
+ * denotation of the single abstract method as a member of the type.
+ */
+ object SAMType {
+ /** The ClassInfo of the class underlying `tp` if it can be instantiated
+ * with an empty parameter list, NoType otherwise.
+ */
+ def zeroParamClass(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: ClassInfo =>
+ def zeroParams(tp: Type): Boolean = tp match {
+ case pt: PolyType => zeroParams(pt.resultType)
+ case mt: MethodType => mt.paramTypes.isEmpty && !mt.resultType.isInstanceOf[MethodType]
+ case et: ExprType => true
+ case _ => false
+ }
+ if ((tp.cls is Trait) || zeroParams(tp.cls.primaryConstructor.info)) tp // !!! needs to be adapted once traits have parameters
+ else NoType
+ case tp: TypeRef =>
+ zeroParamClass(tp.underlying)
+ case tp: RefinedType =>
+ zeroParamClass(tp.underlying)
+ case tp: TypeBounds =>
+ zeroParamClass(tp.underlying)
+ case tp: TypeVar =>
+ zeroParamClass(tp.underlying)
+ case _ =>
+ NoType
+ }
+ /** Does `tp` refer to a class that conforms to its own self type? */
+ def isInstantiatable(tp: Type)(implicit ctx: Context): Boolean = zeroParamClass(tp) match {
+ case cinfo: ClassInfo =>
+ val tref = tp.narrow
+ val selfType = cinfo.selfType.asSeenFrom(tref, cinfo.cls)
+ tref <:< selfType
+ case _ =>
+ false
+ }
+ def unapply(tp: Type)(implicit ctx: Context): Option[SingleDenotation] =
+ if (isInstantiatable(tp)) {
+ val absMems = tp.abstractTermMembers
+ // println(s"absMems: ${absMems map (_.show) mkString ", "}")
+ if (absMems.size == 1)
+ absMems.head.info match {
+ case mt: MethodType if !mt.isDependent => Some(absMems.head)
+ case _ => None
+ }
+ else if (tp isRef defn.PartialFunctionClass)
+ // To maintain compatibility with 2.x, we treat PartialFunction specially,
+ // pretending it is a SAM type. In the future it would be better to merge
+ // Function and PartialFunction, have Function1 contain a isDefinedAt method
+ // def isDefinedAt(x: T) = true
+ // and overwrite that method whenever the function body is a sequence of
+ // case clauses.
+ absMems.find(_.symbol.name == nme.apply)
+ else None
+ }
+ else None
+ }
+
+ // ----- TypeMaps --------------------------------------------------------------------
+
+ /** A function from types to types, with `mapOver` providing the generic
+ * structural recursion. Subclasses implement `apply` and typically call
+ * `mapOver` for the cases they do not handle specially. The `derivedXXX`
+ * hooks let subclasses intercept reconstruction of each type kind.
+ */
+ abstract class TypeMap(implicit protected val ctx: Context) extends (Type => Type) { thisMap =>
+
+ protected def stopAtStatic = true
+
+ def apply(tp: Type): Type
+
+ // Current variance of the position being mapped: 1 = covariant,
+ // -1 = contravariant, 0 = invariant. Mutated during mapOver.
+ protected var variance = 1
+
+ protected def derivedSelect(tp: NamedType, pre: Type): Type =
+ tp.derivedSelect(pre)
+ protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type): Type =
+ tp.derivedRefinedType(parent, tp.refinedName, info)
+ protected def derivedRecType(tp: RecType, parent: Type): Type =
+ tp.rebind(parent)
+ protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type =
+ tp.derivedTypeAlias(alias)
+ protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type =
+ tp.derivedTypeBounds(lo, hi)
+ protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type =
+ tp.derivedSuperType(thistp, supertp)
+ protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ tp.derivedAppliedType(tycon, args)
+ protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type =
+ tp.derivedAndOrType(tp1, tp2)
+ protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type =
+ tp.derivedAnnotatedType(underlying, annot)
+ protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type =
+ tp.derivedWildcardType(bounds)
+ protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type =
+ tp.derivedClassInfo(pre)
+ protected def derivedJavaArrayType(tp: JavaArrayType, elemtp: Type): Type =
+ tp.derivedJavaArrayType(elemtp)
+ protected def derivedMethodType(tp: MethodType, formals: List[Type], restpe: Type): Type =
+ tp.derivedMethodType(tp.paramNames, formals, restpe)
+ protected def derivedExprType(tp: ExprType, restpe: Type): Type =
+ tp.derivedExprType(restpe)
+ protected def derivedPolyType(tp: PolyType, pbounds: List[TypeBounds], restpe: Type): Type =
+ tp.derivedPolyType(tp.paramNames, pbounds, restpe)
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = {
+ implicit val ctx: Context = this.ctx // Dotty deviation: implicits need explicit type
+ tp match {
+ case tp: NamedType =>
+ if (stopAtStatic && tp.symbol.isStatic) tp
+ else derivedSelect(tp, this(tp.prefix))
+
+ case _: ThisType
+ | _: BoundType
+ | NoPrefix => tp
+
+ case tp: RefinedType =>
+ derivedRefinedType(tp, this(tp.parent), this(tp.refinedInfo))
+
+ case tp: TypeAlias =>
+ // an alias occurs at the variance recorded in the alias itself
+ val saved = variance
+ variance = variance * tp.variance
+ val alias1 = this(tp.alias)
+ variance = saved
+ derivedTypeAlias(tp, alias1)
+
+ case tp: TypeBounds =>
+ // lower bound is a contravariant position, upper bound covariant
+ variance = -variance
+ val lo1 = this(tp.lo)
+ variance = -variance
+ derivedTypeBounds(tp, lo1, this(tp.hi))
+
+ case tp: MethodType =>
+ def mapOverMethod = {
+ // parameter types are contravariant positions
+ variance = -variance
+ val ptypes1 = tp.paramTypes mapConserve this
+ variance = -variance
+ derivedMethodType(tp, ptypes1, this(tp.resultType))
+ }
+ mapOverMethod
+
+ case tp: ExprType =>
+ derivedExprType(tp, this(tp.resultType))
+
+ case tp: PolyType =>
+ def mapOverPoly = {
+ variance = -variance
+ val bounds1 = tp.paramBounds.mapConserve(this).asInstanceOf[List[TypeBounds]]
+ variance = -variance
+ derivedPolyType(tp, bounds1, this(tp.resultType))
+ }
+ mapOverPoly
+
+ case tp: RecType =>
+ derivedRecType(tp, this(tp.parent))
+
+ case tp @ SuperType(thistp, supertp) =>
+ derivedSuperType(tp, this(thistp), this(supertp))
+
+ case tp: LazyRef =>
+ // keep laziness: map the referenced type only when it is forced
+ LazyRef(() => this(tp.ref))
+
+ case tp: ClassInfo =>
+ mapClassInfo(tp)
+
+ case tp: TypeVar =>
+ val inst = tp.instanceOpt
+ if (inst.exists) apply(inst) else tp
+
+ case tp: HKApply =>
+ def mapArg(arg: Type, tparam: TypeParamInfo): Type = {
+ // each argument occurs at the variance of its type parameter
+ val saved = variance
+ variance *= tparam.paramVariance
+ try this(arg)
+ finally variance = saved
+ }
+ derivedAppliedType(tp, this(tp.tycon),
+ tp.args.zipWithConserve(tp.typeParams)(mapArg))
+
+ case tp: AndOrType =>
+ derivedAndOrType(tp, this(tp.tp1), this(tp.tp2))
+
+ case tp: SkolemType =>
+ tp
+
+ case tp @ AnnotatedType(underlying, annot) =>
+ val underlying1 = this(underlying)
+ if (underlying1 eq underlying) tp
+ else derivedAnnotatedType(tp, underlying1, mapOver(annot))
+
+ case tp @ WildcardType =>
+ derivedWildcardType(tp, mapOver(tp.optBounds))
+
+ case tp: JavaArrayType =>
+ derivedJavaArrayType(tp, this(tp.elemType))
+
+ case tp: ProtoType =>
+ tp.map(this)
+
+ case _ =>
+ tp
+ }
+ }
+
+ private def treeTypeMap = new TreeTypeMap(typeMap = this)
+
+ def mapOver(syms: List[Symbol]): List[Symbol] = ctx.mapSymbols(syms, treeTypeMap)
+
+ def mapOver(scope: Scope): Scope = {
+ val elems = scope.toList
+ val elems1 = mapOver(elems)
+ if (elems1 eq elems) scope
+ else newScopeWith(elems1: _*)
+ }
+
+ def mapOver(annot: Annotation): Annotation =
+ annot.derivedAnnotation(mapOver(annot.tree))
+
+ def mapOver(tree: Tree): Tree = treeTypeMap(tree)
+
+ /** Can be overridden. By default, only the prefix is mapped. */
+ protected def mapClassInfo(tp: ClassInfo): Type =
+ derivedClassInfo(tp, this(tp.prefix))
+
+ def andThen(f: Type => Type): TypeMap = new TypeMap {
+ override def stopAtStatic = thisMap.stopAtStatic
+ def apply(tp: Type) = f(thisMap(tp))
+ }
+ }
+
+ /** A type map that maps also parents and self type of a ClassInfo */
+ abstract class DeepTypeMap(implicit ctx: Context) extends TypeMap {
+ override def mapClassInfo(tp: ClassInfo) = {
+ val prefix1 = this(tp.prefix)
+ val parents1 = (tp.parents mapConserve this).asInstanceOf[List[TypeRef]]
+ // selfInfo is Type | Symbol; only the Type case is mapped
+ val selfInfo1 = tp.selfInfo match {
+ case selfInfo: Type => this(selfInfo)
+ case selfInfo => selfInfo
+ }
+ tp.derivedClassInfo(prefix1, parents1, tp.decls, selfInfo1)
+ }
+ }
+
+ /** The identity type map; safe to share since it needs no context. */
+ @sharable object IdentityTypeMap extends TypeMap()(NoContext) {
+ override def stopAtStatic = true
+ def apply(tp: Type) = tp
+ }
+
+ /** A TypeMap that, when an exact image does not exist, replaces a type by
+ * an approximation from above or below depending on the current variance:
+ * `hi` in covariant positions, `lo` in contravariant ones, NoType when
+ * invariant (see `approx`).
+ */
+ abstract class ApproximatingTypeMap(implicit ctx: Context) extends TypeMap { thisMap =>
+ def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType) =
+ if (variance == 0) NoType
+ else apply(if (variance < 0) lo else hi)
+
+ override protected def derivedSelect(tp: NamedType, pre: Type) =
+ if (pre eq tp.prefix) tp
+ else tp.info match {
+ case TypeAlias(alias) => apply(alias) // try to heal by following aliases
+ case _ =>
+ if (pre.exists && !pre.isRef(defn.NothingClass) && variance > 0) tp.derivedSelect(pre)
+ else tp.info match {
+ case TypeBounds(lo, hi) => approx(lo, hi)
+ case _ => approx()
+ }
+ }
+ override protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type) =
+ if (parent.exists && info.exists) tp.derivedRefinedType(parent, tp.refinedName, info)
+ else approx(hi = parent)
+ override protected def derivedRecType(tp: RecType, parent: Type) =
+ if (parent.exists) tp.rebind(parent)
+ else approx()
+ override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) =
+ if (alias.exists) tp.derivedTypeAlias(alias)
+ else approx(NoType, TypeBounds.empty)
+ override protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type) =
+ if (lo.exists && hi.exists) tp.derivedTypeBounds(lo, hi)
+ else approx(NoType,
+ if (lo.exists) TypeBounds.lower(lo)
+ else if (hi.exists) TypeBounds.upper(hi)
+ else TypeBounds.empty)
+ override protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type) =
+ if (thistp.exists && supertp.exists) tp.derivedSuperType(thistp, supertp)
+ else NoType
+ override protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ if (tycon.exists && args.forall(_.exists)) tp.derivedAppliedType(tycon, args)
+ else approx() // This is rather coarse, but to do better is a bit complicated
+ override protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type) =
+ if (tp1.exists && tp2.exists) tp.derivedAndOrType(tp1, tp2)
+ else if (tp.isAnd) approx(hi = tp1 & tp2) // if one of tp1d, tp2d exists, it is the result of tp1d & tp2d
+ else approx(lo = tp1 & tp2)
+ override protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation) =
+ if (underlying.exists) tp.derivedAnnotatedType(underlying, annot)
+ else NoType
+ override protected def derivedWildcardType(tp: WildcardType, bounds: Type) =
+ if (bounds.exists) tp.derivedWildcardType(bounds)
+ else WildcardType
+ override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type =
+ if (pre.exists) tp.derivedClassInfo(pre)
+ else NoType
+ }
+
+ // ----- TypeAccumulators ----------------------------------------------------
+
+ abstract class TypeAccumulator[T](implicit protected val ctx: Context) extends ((T, Type) => T) {
+
+ protected def stopAtStatic = true
+
+ def apply(x: T, tp: Type): T
+
+ protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations
+
+ protected var variance = 1
+
+ protected def applyToPrefix(x: T, tp: NamedType) = {
+ val saved = variance
+ variance = 0
+ val result = this(x, tp.prefix)
+ variance = saved
+ result
+ }
+
+ /** Fold `x` over all the immediate parts of `tp`, dispatching on the
+ * form of `tp`. The `variance` field is negated while folding over
+ * contravariant positions (method and poly parameter types, the lower
+ * bound of a real TypeBounds) and restored afterwards; for alias bounds
+ * and higher-kinded type arguments it is multiplied by the declared
+ * variance instead.
+ */
+ def foldOver(x: T, tp: Type): T = tp match {
+ case tp: TypeRef =>
+ // Static references are cut off early when `stopAtStatic` holds.
+ if (stopAtStatic && tp.symbol.isStatic) x
+ else {
+ // If the name resolves to a refinement of the prefix, fold over that
+ // refined info instead of the prefix itself.
+ val tp1 = tp.prefix.lookupRefined(tp.name)
+ if (tp1.exists) this(x, tp1) else applyToPrefix(x, tp)
+ }
+ case tp: TermRef =>
+ if (stopAtStatic && tp.currentSymbol.isStatic) x
+ else applyToPrefix(x, tp)
+
+ // Leaves of the traversal: no sub-parts to fold over.
+ case _: ThisType
+ | _: BoundType
+ | NoPrefix => x
+
+ case tp: RefinedType =>
+ this(this(x, tp.parent), tp.refinedInfo)
+
+ case bounds @ TypeBounds(lo, hi) =>
+ if (lo eq hi) {
+ // An alias (lo eq hi): fold once, scaled by the alias' declared variance.
+ val saved = variance
+ variance = variance * bounds.variance
+ val result = this(x, lo)
+ variance = saved
+ result
+ }
+ else {
+ // Lower bound is a contravariant position, upper bound covariant.
+ variance = -variance
+ val y = this(x, lo)
+ variance = -variance
+ this(y, hi)
+ }
+
+ case tp @ MethodType(pnames, ptypes) =>
+ // Parameter types are contravariant, the result type covariant.
+ variance = -variance
+ val y = foldOver(x, ptypes)
+ variance = -variance
+ this(y, tp.resultType)
+
+ case ExprType(restpe) =>
+ this(x, restpe)
+
+ case tp: PolyType =>
+ // Type parameter bounds are contravariant positions, like value params.
+ variance = -variance
+ val y = foldOver(x, tp.paramBounds)
+ variance = -variance
+ this(y, tp.resultType)
+
+ case tp: RecType =>
+ this(x, tp.parent)
+
+ case SuperType(thistp, supertp) =>
+ this(this(x, thistp), supertp)
+
+ case tp @ ClassInfo(prefix, _, _, _, _) =>
+ // Only the prefix is folded; class members are not part of the traversal.
+ this(x, prefix)
+
+ case tp @ HKApply(tycon, args) =>
+ // Fold each type argument under the variance declared by the
+ // corresponding type parameter of the constructor.
+ def foldArgs(x: T, tparams: List[TypeParamInfo], args: List[Type]): T =
+ if (args.isEmpty) {
+ assert(tparams.isEmpty)
+ x
+ }
+ else {
+ val tparam = tparams.head
+ val saved = variance
+ variance *= tparam.paramVariance
+ val acc =
+ try this(x, args.head)
+ finally variance = saved
+ foldArgs(acc, tparams.tail, args.tail)
+ }
+ foldArgs(this(x, tycon), tp.typeParams, args)
+
+ case tp: AndOrType =>
+ this(this(x, tp.tp1), tp.tp2)
+
+ case tp: SkolemType =>
+ this(x, tp.info)
+
+ case AnnotatedType(underlying, annot) =>
+ // The annotation is folded (via applyToAnnot) before the underlying type.
+ this(applyToAnnot(x, annot), underlying)
+
+ case tp: TypeVar =>
+ this(x, tp.underlying)
+
+ case tp: WildcardType =>
+ this(x, tp.optBounds)
+
+ case tp: JavaArrayType =>
+ this(x, tp.elemType)
+
+ case tp: LazyRef =>
+ // NOTE: this forces the lazy reference.
+ this(x, tp.ref)
+
+ case tp: ProtoType =>
+ tp.fold(x, this)
+
+ case _ => x
+ }
+
+ /** Fold `x` over all types in `ts`, left to right, applying `apply` to each.
+ * (`nil` below is a fresh binder reached only for the empty list.)
+ */
+ final def foldOver(x: T, ts: List[Type]): T = ts match {
+ case t :: ts1 => foldOver(apply(x, t), ts1)
+ case nil => x
+ }
+ }
+
+ /** A TypeAccumulator that discards its accumulator: subclasses implement
+ * `traverse` for side effects and call `traverseChildren` to recurse into
+ * the parts of a type.
+ */
+ abstract class TypeTraverser(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ def traverse(tp: Type): Unit
+ def apply(x: Unit, tp: Type): Unit = traverse(tp)
+ protected def traverseChildren(tp: Type) = foldOver((), tp)
+ }
+
+ /** Accumulator that tests whether some part of a type satisfies `p`.
+ * When `forceLazy` is false, a LazyRef node is still tested with `p`
+ * but its referenced type is not folded over (so the ref is not forced).
+ * Short-circuits as soon as the accumulator becomes true.
+ */
+ class ExistsAccumulator(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
+ override def stopAtStatic = false
+ def apply(x: Boolean, tp: Type) =
+ x || p(tp) || (forceLazy || !tp.isInstanceOf[LazyRef]) && foldOver(x, tp)
+ }
+
+ /** Accumulator that applies the side-effecting function `p` to every part
+ * of a type; `p(tp)` runs before the children of `tp` are folded over.
+ */
+ class ForeachAccumulator(p: Type => Unit, override val stopAtStatic: Boolean)(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp)
+ }
+
+ /** Accumulator that tests whether any part of a type `isUnsafeNonvariant`. */
+ class HasUnsafeNonAccumulator(implicit ctx: Context) extends TypeAccumulator[Boolean] {
+ def apply(x: Boolean, tp: Type) = x || tp.isUnsafeNonvariant || foldOver(x, tp)
+ }
+
+ /** Collects into a mutable set all NamedType parts of a type that satisfy
+ * predicate `p`. When `excludeLowerBounds` is set, the lower bounds of
+ * TypeBounds are not searched.
+ */
+ class NamedPartsAccumulator(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context) extends TypeAccumulator[mutable.Set[NamedType]] {
+ override def stopAtStatic = false
+ def maybeAdd(x: mutable.Set[NamedType], tp: NamedType) = if (p(tp)) x += tp else x
+ // Types already visited; guards against revisiting (possibly cyclic) types.
+ val seen: mutable.Set[Type] = mutable.Set()
+ def apply(x: mutable.Set[NamedType], tp: Type): mutable.Set[NamedType] =
+ if (seen contains tp) x
+ else {
+ seen += tp
+ tp match {
+ case tp: TermRef =>
+ // A TermRef contributes itself, its prefix parts, and its underlying type.
+ apply(foldOver(maybeAdd(x, tp), tp), tp.underlying)
+ case tp: TypeRef =>
+ foldOver(maybeAdd(x, tp), tp)
+ case TypeBounds(lo, hi) =>
+ if (!excludeLowerBounds) apply(x, lo)
+ apply(x, hi)
+ case tp: ThisType =>
+ apply(x, tp.tref)
+ case tp: ConstantType =>
+ apply(x, tp.underlying)
+ case tp: MethodParam =>
+ apply(x, tp.underlying)
+ case tp: PolyParam =>
+ apply(x, tp.underlying)
+ case _ =>
+ foldOver(x, tp)
+ }
+ }
+ }
+
+ // ----- Name Filters --------------------------------------------------
+
+ /** A name filter selects or discards a member name of a type `pre`.
+ * To enable efficient caching, name filters have to satisfy the
+ * following invariant: If `keep` is a name filter, and `pre` has
+ * class `C` as a base class, then
+ *
+ * keep(pre, name) implies keep(C.this, name)
+ */
+ /** A predicate over (prefix type, member name) pairs; see the caching
+ * invariant documented above.
+ */
+ abstract class NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean
+ }
+
+ /** A filter for names of abstract types of a given type */
+ object abstractTypeNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTypeName && {
+ val mbr = pre.nonPrivateMember(name)
+ (mbr.symbol is Deferred) && mbr.info.isInstanceOf[RealTypeBounds]
+ }
+ }
+
+ /** A filter for names of non-class type members of a given type */
+ object nonClassTypeNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTypeName && {
+ val mbr = pre.member(name)
+ mbr.symbol.isType && !mbr.symbol.isClass
+ }
+ }
+
+ /** A filter for names of deferred term definitions of a given type */
+ object abstractTermNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTermName && pre.nonPrivateMember(name).hasAltWith(_.symbol is Deferred)
+ }
+
+ /** A filter selecting all type names. */
+ object typeNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = name.isTypeName
+ }
+
+ /** A filter for term members that have at least one non-method alternative. */
+ object fieldFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTermName && (pre member name).hasAltWith(!_.symbol.is(Method))
+ }
+
+ /** A filter that accepts every name. */
+ object takeAllFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = true
+ }
+
+ object implicitFilter extends NameFilter {
+ /** A dummy filter method.
+ * Implicit filtering is handled specially in computeMemberNames, so
+ * no post-filtering is needed.
+ */
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = true
+ }
+
+ // ----- Exceptions -------------------------------------------------------------
+
+ /** Base class for all type-level errors raised in this module. */
+ class TypeError(msg: String) extends Exception(msg)
+
+ /** Error for a prefix `pre` that is illegal for `denot` because it still
+ * contains the abstract type members `absMembers`.
+ */
+ class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])
+ extends TypeError(
+ s"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}")
+
+ /** Error for a type name that cannot be resolved in `pre`, typically
+ * because a classfile is missing from the classpath. Prints a stack
+ * trace eagerly when the compiler runs with -debug.
+ */
+ class MissingType(pre: Type, name: Name)(implicit ctx: Context) extends TypeError(
+ i"""cannot resolve reference to type $pre.$name
+ |the classfile defining the type might be missing from the classpath${otherReason(pre)}""") {
+ if (ctx.debug) printStackTrace()
+ }
+
+ /** Supplementary explanation appended to a MissingType message when the
+ * prefix is a ThisType with an explicit self type.
+ */
+ private def otherReason(pre: Type)(implicit ctx: Context): String = pre match {
+ case pre: ThisType if pre.givenSelfType.exists =>
+ i"\nor the self type of $pre might not contain all transitive dependencies"
+ case _ => ""
+ }
+
+ /** Error signalling a cyclic reference through `denot`. Constructed only
+ * via the companion's `apply`, which also handles debug logging.
+ */
+ class CyclicReference private (val denot: SymDenotation)
+ extends TypeError(s"cyclic reference involving $denot") {
+ def show(implicit ctx: Context) = s"cyclic reference involving ${denot.show}"
+ }
+
+ object CyclicReference {
+ /** Create a CyclicReference; unless we are in CheckCyclic mode, log the
+ * message and (up to 200 frames of) the stack trace to `cyclicErrors`.
+ */
+ def apply(denot: SymDenotation)(implicit ctx: Context): CyclicReference = {
+ val ex = new CyclicReference(denot)
+ if (!(ctx.mode is Mode.CheckCyclic)) {
+ cyclicErrors.println(ex.getMessage)
+ for (elem <- ex.getStackTrace take 200)
+ cyclicErrors.println(elem.toString)
+ }
+ ex
+ }
+ }
+
+ /** Error raised when two types `tp1` and `tp2` cannot be merged. */
+ class MergeError(msg: String, val tp1: Type, val tp2: Type) extends TypeError(msg)
+
+ // ----- Debug ---------------------------------------------------------
+
+ // Global switch for tracing; shared across compiler threads.
+ @sharable var debugTrace = false
+
+ // Type names to watch during debugging; the list is currently empty.
+ val watchList = List[String](
+ ) map (_.toTypeName)
+
+ /** True if `tp` is a TypeRef whose name is on the watch list. */
+ def isWatched(tp: Type) = tp match {
+ case TypeRef(_, name) => watchList contains name
+ case _ => false
+ }
+
+ // ----- Decorator implicits --------------------------------------------
+
+ /** Makes the TypeApplications extension methods available on any Type. */
+ implicit def decorateTypeApplications(tpe: Type): TypeApplications = new TypeApplications(tpe)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala
new file mode 100644
index 000000000..cb9670c69
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala
@@ -0,0 +1,128 @@
+package dotty.tools.dotc
+package core
+
+import Types._, Contexts._, util.Stats._, Hashable._, Names._
+import config.Config
+import util.HashSet
+
+/** Defines operation `unique` for hash-consing types.
+ * Also defines specialized hash sets for hash consing uniques of a specific type.
+ * All sets offer a `enterIfNew` method which checks whether a type
+ * with the given parts exists already and creates a new one if not.
+ */
+object Uniques {
+
+ // Statistics helpers, active only when `monitored` is set.
+ private def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass)
+ private def recordCaching(h: Int, clazz: Class[_]): Unit =
+ if (h == NotCached) {
+ record("uncached-types")
+ record(s"uncached: $clazz")
+ } else {
+ record("cached-types")
+ record(s"cached: $clazz")
+ }
+
+ /** Hash-cons `tp` in the context's generic uniques set. Types whose hash
+ * is NotCached are returned unchanged; otherwise the canonical instance
+ * (possibly `tp` itself, freshly entered) is returned.
+ */
+ def unique[T <: Type](tp: T)(implicit ctx: Context): T = {
+ if (monitored) recordCaching(tp)
+ if (tp.hash == NotCached) tp
+ else if (monitored) {
+ // Monitored path: additionally record when a new unique was created.
+ val size = ctx.uniques.size
+ val result = ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T]
+ if (ctx.uniques.size > size) record(s"fresh unique ${tp.getClass}")
+ result
+ } else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T]
+ } /* !!! DEBUG
+ ensuring (
+ result => tp.toString == result.toString || {
+ println(s"cache mismatch; tp = $tp, cached = $result")
+ false
+ }
+ )
+ */
+
+ /** Specialized hash-consing set for NamedTypes, keyed by (prefix, name). */
+ final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity) with Hashable {
+ override def hash(x: NamedType): Int = x.hash
+
+ // Linear scan of the hash bucket; returns null when no entry matches.
+ private def findPrevious(h: Int, prefix: Type, name: Name): NamedType = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.prefix eq prefix) && (e.name eq name)) return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ /** Return the canonical NamedType for (prefix, name), creating and
+ * entering a CachedTypeRef or CachedTermRef if none exists yet.
+ */
+ def enterIfNew(prefix: Type, name: Name): NamedType = {
+ val h = doHash(name, prefix)
+ // NOTE(review): statistics always record CachedTermRef here, even when
+ // a CachedTypeRef will be created — stats only, no behavioral impact.
+ if (monitored) recordCaching(h, classOf[CachedTermRef])
+ def newType =
+ if (name.isTypeName) new CachedTypeRef(prefix, name.asTypeName, h)
+ else new CachedTermRef(prefix, name.asTermName, h)
+ if (h == NotCached) newType
+ else {
+ val r = findPrevious(h, prefix, name)
+ if (r ne null) r else addEntryAfterScan(newType)
+ }
+ }
+ }
+
+ /** Specialized hash-consing set for TypeAlias, keyed by (alias, variance). */
+ final class TypeAliasUniques extends HashSet[TypeAlias](Config.initialUniquesCapacity) with Hashable {
+ override def hash(x: TypeAlias): Int = x.hash
+
+ private def findPrevious(h: Int, alias: Type, variance: Int): TypeAlias = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.alias eq alias) && (e.variance == variance)) return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ /** Return the canonical TypeAlias for (alias, variance), creating and
+ * entering a CachedTypeAlias if none exists yet.
+ */
+ def enterIfNew(alias: Type, variance: Int): TypeAlias = {
+ val h = doHash(variance, alias)
+ if (monitored) recordCaching(h, classOf[TypeAlias])
+ def newAlias = new CachedTypeAlias(alias, variance, h)
+ if (h == NotCached) newAlias
+ else {
+ val r = findPrevious(h, alias, variance)
+ if (r ne null) r
+ else addEntryAfterScan(newAlias)
+ }
+ }
+ }
+
+ /** Specialized hash-consing set for RefinedTypes, keyed by
+ * (parent, refinedName, refinedInfo).
+ */
+ final class RefinedUniques extends HashSet[RefinedType](Config.initialUniquesCapacity) with Hashable {
+ override val hashSeed = classOf[CachedRefinedType].hashCode // some types start life as CachedRefinedTypes, need to have same hash seed
+ override def hash(x: RefinedType): Int = x.hash
+
+ private def findPrevious(h: Int, parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.parent eq parent) && (e.refinedName eq refinedName) && (e.refinedInfo eq refinedInfo))
+ return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ /** Return the canonical RefinedType for the given parts, creating and
+ * entering a CachedRefinedType if none exists yet.
+ */
+ def enterIfNew(parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
+ val h = doHash(refinedName, refinedInfo, parent)
+ def newType = new CachedRefinedType(parent, refinedName, refinedInfo, h)
+ if (monitored) recordCaching(h, classOf[CachedRefinedType])
+ if (h == NotCached) newType
+ else {
+ val r = findPrevious(h, parent, refinedName, refinedInfo)
+ if (r ne null) r else addEntryAfterScan(newType)
+ }
+ }
+
+ /** Variant of enterIfNew taking an already-constructed RefinedType. */
+ def enterIfNew(rt: RefinedType) = {
+ if (monitored) recordCaching(rt)
+ if (rt.hash == NotCached) rt
+ else {
+ val r = findPrevious(rt.hash, rt.parent, rt.refinedName, rt.refinedInfo)
+ if (r ne null) r else addEntryAfterScan(rt)
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala
new file mode 100644
index 000000000..cad3a4132
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala
@@ -0,0 +1,88 @@
+package dotty.tools
+package dotc
+package core
+package classfile
+
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+
+import io.AbstractFile
+
+/**
+ * This class reads files byte per byte. Only used by ClassFileParser
+ *
+ * @author Philippe Altherr
+ * @version 1.0, 23/03/2004
+ */
+class AbstractFileReader(val file: AbstractFile) {
+
+ /** the buffer containing the file
+ */
+ val buf: Array[Byte] = file.toByteArray
+
+ /** the current input pointer
+ */
+ var bp: Int = 0
+
+ /** return byte at offset 'pos' (does not move `bp`)
+ */
+ @throws(classOf[IndexOutOfBoundsException])
+ def byteAt(pos: Int): Byte = buf(pos)
+
+ /** read a byte and advance `bp`
+ */
+ @throws(classOf[IndexOutOfBoundsException])
+ def nextByte: Byte = {
+ val b = buf(bp)
+ bp += 1
+ b
+ }
+
+ /** read some bytes; advances `bp` by `len` before slicing the range read
+ */
+ def nextBytes(len: Int): Array[Byte] = {
+ bp += len
+ buf.slice(bp - len, bp)
+ }
+
+ /** read a character (two bytes, big-endian, per the classfile format)
+ */
+ def nextChar: Char =
+ (((nextByte & 0xff) << 8) + (nextByte & 0xff)).toChar
+
+ /** read an integer (four bytes, big-endian)
+ */
+ def nextInt: Int =
+ ((nextByte & 0xff) << 24) + ((nextByte & 0xff) << 16) +
+ ((nextByte & 0xff) << 8) + (nextByte & 0xff)
+
+
+ /** extract a character at position mybp from buf (absolute offset; `bp` unchanged)
+ */
+ def getChar(mybp: Int): Char =
+ (((buf(mybp) & 0xff) << 8) + (buf(mybp + 1) & 0xff)).toChar
+
+ /** extract an integer at position mybp from buf (absolute offset; `bp` unchanged)
+ */
+ def getInt(mybp: Int): Int =
+ ((buf(mybp ) & 0xff) << 24) + ((buf(mybp + 1) & 0xff) << 16) +
+ ((buf(mybp + 2) & 0xff) << 8) + (buf(mybp + 3) & 0xff)
+
+ /** extract a long integer at position mybp from buf (two big-endian ints)
+ */
+ def getLong(mybp: Int): Long =
+ (getInt(mybp).toLong << 32) + (getInt(mybp + 4) & 0xffffffffL)
+
+ /** extract a float at position mybp from buf
+ */
+ def getFloat(mybp: Int): Float = intBitsToFloat(getInt(mybp))
+
+ /** extract a double at position mybp from buf
+ */
+ def getDouble(mybp: Int): Double = longBitsToDouble(getLong(mybp))
+
+ /** skip next 'n' bytes
+ */
+ def skip(n: Int): Unit = { bp += n }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala
new file mode 100644
index 000000000..badd9e560
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala
@@ -0,0 +1,221 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package dotty.tools.dotc.core.classfile
+
+object ByteCodecs {
+
+ /** Shift every byte of `src` up by one and expand 0x7f into the two-byte
+ * sequence 0xc0 0x80, returning a fresh array. Applied after encode8to7
+ * (whose output bytes are all in 0..0x7f), this guarantees the result
+ * contains no zero bytes.
+ */
+ def avoidZero(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ var count = 0
+ // First pass: count 0x7f bytes to size the destination array.
+ while (i < srclen) {
+ if (src(i) == 0x7f) count += 1
+ i += 1
+ }
+ val dst = new Array[Byte](srclen + count)
+ i = 0
+ var j = 0
+ while (i < srclen) {
+ val in = src(i)
+ if (in == 0x7f) {
+ dst(j) = (0xc0).toByte
+ dst(j + 1) = (0x80).toByte
+ j += 2
+ } else {
+ dst(j) = (in + 1).toByte
+ j += 1
+ }
+ i += 1
+ }
+ dst
+ }
+
+ /** In-place inverse of `avoidZero`: collapse 0xc0 0x80 pairs and plain
+ * zero bytes back to 0x7f, shift every other byte down by one, and
+ * return the length of the regenerated data (j <= src.length).
+ */
+ def regenerateZero(src: Array[Byte]): Int = {
+ var i = 0
+ val srclen = src.length
+ var j = 0
+ while (i < srclen) {
+ val in: Int = src(i) & 0xff
+ if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
+ src(j) = 0x7f
+ i += 2
+ } else if (in == 0) {
+ src(j) = 0x7f
+ i += 1
+ } else {
+ src(j) = (in - 1).toByte
+ i += 1
+ }
+ j += 1
+ }
+ j
+ }
+
+ /** Repack 8-bit bytes into 7-bit chunks: every 7 input bytes become
+ * 8 output bytes, each holding 7 data bits (values 0..0x7f).
+ * Returns a fresh array of length ceil(srclen * 8 / 7).
+ */
+ def encode8to7(src: Array[Byte]): Array[Byte] = {
+ val srclen = src.length
+ val dstlen = (srclen * 8 + 6) / 7
+ val dst = new Array[Byte](dstlen)
+ var i = 0
+ var j = 0
+ // Main loop: full groups of 7 input bytes -> 8 output bytes.
+ while (i + 6 < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte
+ var out: Int = in >>> 7
+ in = src(i + 1) & 0xff
+ dst(j + 1) = (out | (in << 1) & 0x7f).toByte
+ out = in >>> 6
+ in = src(i + 2) & 0xff
+ dst(j + 2) = (out | (in << 2) & 0x7f).toByte
+ out = in >>> 5
+ in = src(i + 3) & 0xff
+ dst(j + 3) = (out | (in << 3) & 0x7f).toByte
+ out = in >>> 4
+ in = src(i + 4) & 0xff
+ dst(j + 4) = (out | (in << 4) & 0x7f).toByte
+ out = in >>> 3
+ in = src(i + 5) & 0xff
+ dst(j + 5) = (out | (in << 5) & 0x7f).toByte
+ out = in >>> 2
+ in = src(i + 6) & 0xff
+ dst(j + 6) = (out | (in << 6) & 0x7f).toByte
+ out = in >>> 1
+ dst(j + 7) = out.toByte
+ i += 7
+ j += 8
+ }
+ // Tail: 1..6 remaining input bytes, handled by nested partial steps.
+ if (i < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte; j += 1
+ var out: Int = in >>> 7
+ if (i + 1 < srclen) {
+ in = src(i + 1) & 0xff
+ dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
+ out = in >>> 6
+ if (i + 2 < srclen) {
+ in = src(i + 2) & 0xff
+ dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
+ out = in >>> 5
+ if (i + 3 < srclen) {
+ in = src(i + 3) & 0xff
+ dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
+ out = in >>> 4
+ if (i + 4 < srclen) {
+ in = src(i + 4) & 0xff
+ dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
+ out = in >>> 3
+ if (i + 5 < srclen) {
+ in = src(i + 5) & 0xff
+ dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
+ out = in >>> 2
+ }
+ }
+ }
+ }
+ }
+ // Flush the final carry bits, if any remain.
+ if (j < dstlen) dst(j) = out.toByte
+ }
+ dst
+ }
+
+ /** In-place inverse of `encode8to7`: repack the first `srclen` 7-bit
+ * chunks of `src` into 8-bit bytes and return the decoded length
+ * dstlen = ceil(srclen * 7 / 8). Decoded data overwrites src(0..dstlen-1).
+ */
+ def decode7to8(src: Array[Byte], srclen: Int): Int = {
+ var i = 0
+ var j = 0
+ val dstlen = (srclen * 7 + 7) / 8
+ // Main loop: full groups of 8 input chunks -> 7 output bytes.
+ while (i + 7 < srclen) {
+ var out: Int = src(i)
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte
+ out = in >>> 1
+ in = src(i + 2)
+ src(j + 1) = (out | (in & 0x03) << 6).toByte
+ out = in >>> 2
+ in = src(i + 3)
+ src(j + 2) = (out | (in & 0x07) << 5).toByte
+ out = in >>> 3
+ in = src(i + 4)
+ src(j + 3) = (out | (in & 0x0f) << 4).toByte
+ out = in >>> 4
+ in = src(i + 5)
+ src(j + 4) = (out | (in & 0x1f) << 3).toByte
+ out = in >>> 5
+ in = src(i + 6)
+ src(j + 5) = (out | (in & 0x3f) << 2).toByte
+ out = in >>> 6
+ in = src(i + 7)
+ src(j + 6) = (out | in << 1).toByte
+ i += 8
+ j += 7
+ }
+ // Tail: 1..7 remaining chunks, handled by nested partial steps.
+ if (i < srclen) {
+ var out: Int = src(i)
+ if (i + 1 < srclen) {
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte; j += 1
+ out = in >>> 1
+ if (i + 2 < srclen) {
+ in = src(i + 2)
+ src(j) = (out | (in & 0x03) << 6).toByte; j += 1
+ out = in >>> 2
+ if (i + 3 < srclen) {
+ in = src(i + 3)
+ src(j) = (out | (in & 0x07) << 5).toByte; j += 1
+ out = in >>> 3
+ if (i + 4 < srclen) {
+ in = src(i + 4)
+ src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
+ out = in >>> 4
+ if (i + 5 < srclen) {
+ in = src(i + 5)
+ src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
+ out = in >>> 5
+ if (i + 6 < srclen) {
+ in = src(i + 6)
+ src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
+ out = in >>> 6
+ }
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) src(j) = out.toByte
+ }
+ dstlen
+ }
+
+ /** Full encoding pipeline: 8-to-7 bit repacking, then zero-byte avoidance. */
+ def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
+
+ /**
+ * Destructively decodes array xs and returns the length of the decoded array.
+ *
+ * Sometimes returns (length + 1) of the decoded array. Example:
+ *
+ * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * enc: Array[Byte] = Array(2, 5, 13, 1)
+ *
+ * scala> reflect.generic.ByteCodecs.decode(enc)
+ * res43: Int = 4
+ *
+ * scala> enc
+ * res44: Array[Byte] = Array(1, 2, 3, 0)
+ *
+ * However, this does not always happen.
+ */
+ def decode(xs: Array[Byte]): Int = {
+ val len = regenerateZero(xs)
+ decode7to8(xs, len)
+ }
+}
+
+
+
+
+
+
+
+
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
new file mode 100644
index 000000000..dd29fa49d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
@@ -0,0 +1,378 @@
+package dotty.tools.dotc
+package core
+package classfile
+
+import scala.annotation.switch
+
+object ClassfileConstants {
+
+ // Classfile header: magic number and the minimum supported version.
+ final val JAVA_MAGIC = 0xCAFEBABE
+ final val JAVA_MAJOR_VERSION = 45
+ final val JAVA_MINOR_VERSION = 3
+
+ /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html)
+ *
+ * If the `ACC_INTERFACE` flag is set, the `ACC_ABSTRACT` flag must also
+ * be set (ch. 2.13.1).
+ *
+ * A class file cannot have both its `ACC_FINAL` and `ACC_ABSTRACT` flags
+ * set (ch. 2.8.2).
+ *
+ * A field may have at most one of its `ACC_PRIVATE`, `ACC_PROTECTED`,
+ * `ACC_PUBLIC` flags set (ch. 2.7.4).
+ *
+ * A field may not have both its `ACC_FINAL` and `ACC_VOLATILE` flags set
+ * (ch. 2.9.1).
+ *
+ * If a method has its `ACC_ABSTRACT` flag set it must not have any of its
+ * `ACC_FINAL`, `ACC_NATIVE`, `ACC_PRIVATE`, `ACC_STATIC`, `ACC_STRICT`,
+ * or `ACC_SYNCHRONIZED` flags set (ch. 2.13.3.2).
+ *
+ * All interface methods must have their `ACC_ABSTRACT` and
+ * `ACC_PUBLIC` flags set.
+ *
+ * Note for future reference: see this thread on ACC_SUPER and
+ * how its enforcement differs on the android vm.
+ * https://groups.google.com/forum/?hl=en#!topic/jvm-languages/jVhzvq8-ZIk
+ *
+ */ // Class Field Method
+ final val JAVA_ACC_PUBLIC = 0x0001 // X X X
+ final val JAVA_ACC_PRIVATE = 0x0002 // X X
+ final val JAVA_ACC_PROTECTED = 0x0004 // X X
+ final val JAVA_ACC_STATIC = 0x0008 // X X
+ final val JAVA_ACC_FINAL = 0x0010 // X X X
+ final val JAVA_ACC_SUPER = 0x0020 // X
+ final val JAVA_ACC_SYNCHRONIZED = 0x0020 // X
+ final val JAVA_ACC_VOLATILE = 0x0040 // X
+ final val JAVA_ACC_BRIDGE = 0x0040 // X
+ final val JAVA_ACC_TRANSIENT = 0x0080 // X
+ final val JAVA_ACC_VARARGS = 0x0080 // X
+ final val JAVA_ACC_NATIVE = 0x0100 // X
+ final val JAVA_ACC_INTERFACE = 0x0200 // X
+ final val JAVA_ACC_ABSTRACT = 0x0400 // X X
+ final val JAVA_ACC_STRICT = 0x0800 // X
+ final val JAVA_ACC_SYNTHETIC = 0x1000 // X X X
+ final val JAVA_ACC_ANNOTATION = 0x2000 // X
+ final val JAVA_ACC_ENUM = 0x4000 // X X
+
+ // tags describing the type of a literal in the constant pool
+ final val CONSTANT_UTF8 = 1
+ final val CONSTANT_UNICODE = 2
+ final val CONSTANT_INTEGER = 3
+ final val CONSTANT_FLOAT = 4
+ final val CONSTANT_LONG = 5
+ final val CONSTANT_DOUBLE = 6
+ final val CONSTANT_CLASS = 7
+ final val CONSTANT_STRING = 8
+ final val CONSTANT_FIELDREF = 9
+ final val CONSTANT_METHODREF = 10
+ final val CONSTANT_INTFMETHODREF = 11
+ final val CONSTANT_NAMEANDTYPE = 12
+
+ final val CONSTANT_METHODHANDLE = 15
+ final val CONSTANT_METHODTYPE = 16
+ final val CONSTANT_INVOKEDYNAMIC = 18
+
+ // tags describing the type of a literal in attribute values
+ final val BYTE_TAG = 'B'
+ final val CHAR_TAG = 'C'
+ final val DOUBLE_TAG = 'D'
+ final val FLOAT_TAG = 'F'
+ final val INT_TAG = 'I'
+ final val LONG_TAG = 'J'
+ final val SHORT_TAG = 'S'
+ final val BOOL_TAG = 'Z'
+ final val STRING_TAG = 's'
+ final val ENUM_TAG = 'e'
+ final val CLASS_TAG = 'c'
+ final val ARRAY_TAG = '['
+ final val VOID_TAG = 'V'
+ final val TVAR_TAG = 'T'
+ final val OBJECT_TAG = 'L'
+ final val ANNOTATION_TAG = '@'
+ final val SCALA_NOTHING = "scala.runtime.Nothing$"
+ final val SCALA_NULL = "scala.runtime.Null$"
+
+
+ // tags describing the type of newarray
+ final val T_BOOLEAN = 4
+ final val T_CHAR = 5
+ final val T_FLOAT = 6
+ final val T_DOUBLE = 7
+ final val T_BYTE = 8
+ final val T_SHORT = 9
+ final val T_INT = 10
+ final val T_LONG = 11
+
+ // JVM mnemonics
+ final val nop = 0x00
+ final val aconst_null = 0x01
+ final val iconst_m1 = 0x02
+
+ final val iconst_0 = 0x03
+ final val iconst_1 = 0x04
+ final val iconst_2 = 0x05
+ final val iconst_3 = 0x06
+ final val iconst_4 = 0x07
+ final val iconst_5 = 0x08
+
+ final val lconst_0 = 0x09
+ final val lconst_1 = 0x0a
+ final val fconst_0 = 0x0b
+ final val fconst_1 = 0x0c
+ final val fconst_2 = 0x0d
+ final val dconst_0 = 0x0e
+ final val dconst_1 = 0x0f
+
+ final val bipush = 0x10
+ final val sipush = 0x11
+ final val ldc = 0x12
+ final val ldc_w = 0x13
+ final val ldc2_w = 0x14
+
+ final val iload = 0x15
+ final val lload = 0x16
+ final val fload = 0x17
+ final val dload = 0x18
+ final val aload = 0x19
+
+ final val iload_0 = 0x1a
+ final val iload_1 = 0x1b
+ final val iload_2 = 0x1c
+ final val iload_3 = 0x1d
+ final val lload_0 = 0x1e
+ final val lload_1 = 0x1f
+ final val lload_2 = 0x20
+ final val lload_3 = 0x21
+ final val fload_0 = 0x22
+ final val fload_1 = 0x23
+ final val fload_2 = 0x24
+ final val fload_3 = 0x25
+ final val dload_0 = 0x26
+ final val dload_1 = 0x27
+ final val dload_2 = 0x28
+ final val dload_3 = 0x29
+ final val aload_0 = 0x2a
+ final val aload_1 = 0x2b
+ final val aload_2 = 0x2c
+ final val aload_3 = 0x2d
+ final val iaload = 0x2e
+ final val laload = 0x2f
+ final val faload = 0x30
+ final val daload = 0x31
+ final val aaload = 0x32
+ final val baload = 0x33
+ final val caload = 0x34
+ final val saload = 0x35
+
+ final val istore = 0x36
+ final val lstore = 0x37
+ final val fstore = 0x38
+ final val dstore = 0x39
+ final val astore = 0x3a
+ final val istore_0 = 0x3b
+ final val istore_1 = 0x3c
+ final val istore_2 = 0x3d
+ final val istore_3 = 0x3e
+ final val lstore_0 = 0x3f
+ final val lstore_1 = 0x40
+ final val lstore_2 = 0x41
+ final val lstore_3 = 0x42
+ final val fstore_0 = 0x43
+ final val fstore_1 = 0x44
+ final val fstore_2 = 0x45
+ final val fstore_3 = 0x46
+ final val dstore_0 = 0x47
+ final val dstore_1 = 0x48
+ final val dstore_2 = 0x49
+ final val dstore_3 = 0x4a
+ final val astore_0 = 0x4b
+ final val astore_1 = 0x4c
+ final val astore_2 = 0x4d
+ final val astore_3 = 0x4e
+ final val iastore = 0x4f
+ final val lastore = 0x50
+ final val fastore = 0x51
+ final val dastore = 0x52
+ final val aastore = 0x53
+ final val bastore = 0x54
+ final val castore = 0x55
+ final val sastore = 0x56
+
+ final val pop = 0x57
+ final val pop2 = 0x58
+ final val dup = 0x59
+ final val dup_x1 = 0x5a
+ final val dup_x2 = 0x5b
+ final val dup2 = 0x5c
+ final val dup2_x1 = 0x5d
+ final val dup2_x2 = 0x5e
+ final val swap = 0x5f
+
+ final val iadd = 0x60
+ final val ladd = 0x61
+ final val fadd = 0x62
+ final val dadd = 0x63
+ final val isub = 0x64
+ final val lsub = 0x65
+ final val fsub = 0x66
+ final val dsub = 0x67
+ final val imul = 0x68
+ final val lmul = 0x69
+ final val fmul = 0x6a
+ final val dmul = 0x6b
+ final val idiv = 0x6c
+ final val ldiv = 0x6d
+ final val fdiv = 0x6e
+ final val ddiv = 0x6f
+ final val irem = 0x70
+ final val lrem = 0x71
+ final val frem = 0x72
+ final val drem = 0x73
+
+ final val ineg = 0x74
+ final val lneg = 0x75
+ final val fneg = 0x76
+ final val dneg = 0x77
+
+ final val ishl = 0x78
+ final val lshl = 0x79
+ final val ishr = 0x7a
+ final val lshr = 0x7b
+ final val iushr = 0x7c
+ final val lushr = 0x7d
+ final val iand = 0x7e
+ final val land = 0x7f
+ final val ior = 0x80
+ final val lor = 0x81
+ final val ixor = 0x82
+ final val lxor = 0x83
+ final val iinc = 0x84
+
+ final val i2l = 0x85
+ final val i2f = 0x86
+ final val i2d = 0x87
+ final val l2i = 0x88
+ final val l2f = 0x89
+ final val l2d = 0x8a
+ final val f2i = 0x8b
+ final val f2l = 0x8c
+ final val f2d = 0x8d
+ final val d2i = 0x8e
+ final val d2l = 0x8f
+ final val d2f = 0x90
+ final val i2b = 0x91
+ final val i2c = 0x92
+ final val i2s = 0x93
+
+ final val lcmp = 0x94
+ final val fcmpl = 0x95
+ final val fcmpg = 0x96
+ final val dcmpl = 0x97
+ final val dcmpg = 0x98
+
+ final val ifeq = 0x99
+ final val ifne = 0x9a
+ final val iflt = 0x9b
+ final val ifge = 0x9c
+ final val ifgt = 0x9d
+ final val ifle = 0x9e
+ final val if_icmpeq = 0x9f
+ final val if_icmpne = 0xa0
+ final val if_icmplt = 0xa1
+ final val if_icmpge = 0xa2
+ final val if_icmpgt = 0xa3
+ final val if_icmple = 0xa4
+ final val if_acmpeq = 0xa5
+ final val if_acmpne = 0xa6
+ final val goto = 0xa7
+ final val jsr = 0xa8
+ final val ret = 0xa9
+ final val tableswitch = 0xaa
+ final val lookupswitch = 0xab
+ final val ireturn = 0xac
+ final val lreturn = 0xad
+ final val freturn = 0xae
+ final val dreturn = 0xaf
+ final val areturn = 0xb0
+ final val return_ = 0xb1
+
+ final val getstatic = 0xb2
+ final val putstatic = 0xb3
+ final val getfield = 0xb4
+ final val putfield = 0xb5
+
+ final val invokevirtual = 0xb6
+ final val invokespecial = 0xb7
+ final val invokestatic = 0xb8
+ final val invokeinterface = 0xb9
+ final val xxxunusedxxxx = 0xba
+
+ final val new_ = 0xbb
+ final val newarray = 0xbc
+ final val anewarray = 0xbd
+ final val arraylength = 0xbe
+ final val athrow = 0xbf
+ final val checkcast = 0xc0
+ final val instanceof = 0xc1
+ final val monitorenter = 0xc2
+ final val monitorexit = 0xc3
+ final val wide = 0xc4
+ final val multianewarray = 0xc5
+ final val ifnull = 0xc6
+ final val ifnonnull = 0xc7
+ final val goto_w = 0xc8
+ final val jsr_w = 0xc9
+
+ // reserved opcodes
+ final val breakpoint = 0xca
+ final val impdep1 = 0xfe
+ final val impdep2 = 0xff
+
+ import Flags._
+ /** Translates JVM access flags into dotc FlagSets. The class/field/method
+ * specializations below override `baseFlags`/`isClass` as needed.
+ */
+ abstract class FlagTranslation {
+
+ // Extra flags contributed by the member kind (e.g. Mutable for non-final fields).
+ protected def baseFlags(jflags: Int) = EmptyFlags
+ protected def isClass: Boolean = false
+
+ // Maps a single JVM flag bit to its dotc counterpart. JAVA_ACC_PUBLIC has
+ // no case here and is never translated; presumably public visibility is
+ // the default when neither Private nor Protected is set — TODO confirm.
+ private def translateFlag(jflag: Int): FlagSet = (jflag: @switch) match {
+ case JAVA_ACC_PRIVATE => Private
+ case JAVA_ACC_PROTECTED => Protected
+ case JAVA_ACC_FINAL => Final
+ case JAVA_ACC_SYNTHETIC => Synthetic
+ case JAVA_ACC_STATIC => JavaStatic
+ case JAVA_ACC_ABSTRACT => if (isClass) Abstract else Deferred
+ case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined
+ case _ => EmptyFlags
+ }
+
+ private def addFlag(base: FlagSet, jflag: Int): FlagSet =
+ if (jflag == 0) base else base | translateFlag(jflag)
+
+ // Translates each relevant bit in turn, masking out ABSTRACT/INTERFACE
+ // for annotations first.
+ private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = {
+ val nflags =
+ if ((jflags & JAVA_ACC_ANNOTATION) == 0) jflags
+ else jflags & ~(JAVA_ACC_ABSTRACT | JAVA_ACC_INTERFACE) // annotations are neither abstract nor interfaces
+ var res: FlagSet = baseFlags | JavaDefined
+ res = addFlag(res, nflags & JAVA_ACC_PRIVATE)
+ res = addFlag(res, nflags & JAVA_ACC_PROTECTED)
+ res = addFlag(res, nflags & JAVA_ACC_FINAL)
+ res = addFlag(res, nflags & JAVA_ACC_SYNTHETIC)
+ res = addFlag(res, nflags & JAVA_ACC_STATIC)
+ res = addFlag(res, nflags & JAVA_ACC_ABSTRACT)
+ res = addFlag(res, nflags & JAVA_ACC_INTERFACE)
+ res
+ }
+
+ /** The dotc FlagSet corresponding to the given JVM access flags. */
+ def flags(jflags: Int): FlagSet = translateFlags(jflags, baseFlags(jflags))
+ }
+ val classTranslation = new FlagTranslation {
+ override def isClass = true
+ }
+ // Non-final Java fields are mutable from Scala's point of view.
+ val fieldTranslation = new FlagTranslation {
+ override def baseFlags(jflags: Int) = if ((jflags & JAVA_ACC_FINAL) == 0) Mutable else EmptyFlags
+ }
+ val methodTranslation = new FlagTranslation {
+ override def baseFlags(jflags: Int) = if ((jflags & JAVA_ACC_BRIDGE) != 0) Bridge else EmptyFlags
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
new file mode 100644
index 000000000..97a82e80d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -0,0 +1,1100 @@
+package dotty.tools
+package dotc
+package core
+package classfile
+
+import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._, Decorators._
+import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Positions._
+import ast.tpd._
+import java.io.{ File, IOException }
+import java.lang.Integer.toHexString
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
+import scala.annotation.switch
+import typer.Checking.checkNonCyclic
+import io.AbstractFile
+import scala.util.control.NonFatal
+
+object ClassfileParser {
+ /** Marker trait for unpicklers that can be embedded in classfiles.
+  * Implemented by the Scala2 and TASTY unpicklers returned from `run()`.
+  */
+ trait Embedded
+}
+
+class ClassfileParser(
+ classfile: AbstractFile,
+ classRoot: ClassDenotation,
+ moduleRoot: ClassDenotation)(ictx: Context) {
+
+ import ClassfileConstants._
+ import ClassfileParser._
+
+ // Byte reader over the raw classfile; all parsing methods advance its pointer `in.bp`.
+ protected val in = new AbstractFileReader(classfile)
+
+ protected val staticModule: Symbol = moduleRoot.sourceModule(ictx)
+
+ protected val instanceScope: MutableScope = newScope // the scope of all instance definitions
+ protected val staticScope: MutableScope = newScope // the scope of all static definitions
+ protected var pool: ConstantPool = _ // the classfile's constant pool
+
+ protected var currentClassName: Name = _ // JVM name of the current class
+ // Type parameters in scope (of enclosing classes, later also of this class), keyed by unexpanded name.
+ protected var classTParams = Map[Name,Symbol]()
+
+ // Give both roots provisional infos so their scopes exist while members are being entered.
+ classRoot.info = (new NoCompleter).withDecls(instanceScope)
+ moduleRoot.info = (new NoCompleter).withDecls(staticScope).withSourceModule(_ => staticModule)
+
+ // True iff the class being parsed is top-level (directly owned by a package class).
+ private def currentIsTopLevel(implicit ctx: Context) = classRoot.owner is Flags.PackageClass
+
+ // Report a classfile whose declared class does not match its location on the classpath.
+ private def mismatchError(c: Symbol) =
+ throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
+
+ /** Parse the classfile, returning the embedded Scala2/TASTY unpickler if one was found.
+  * Any RuntimeException during parsing is rethrown as an IOException flagging the
+  * classfile as broken (printed with stack trace when -debug is on).
+  */
+ def run()(implicit ctx: Context): Option[Embedded] = try {
+ ctx.debuglog("[class] >> " + classRoot.fullName)
+ parseHeader
+ this.pool = new ConstantPool
+ parseClass()
+ } catch {
+ case e: RuntimeException =>
+ if (ctx.debug) e.printStackTrace()
+ throw new IOException(
+ i"""class file $classfile is broken, reading aborted with ${e.getClass}
+ |${Option(e.getMessage).getOrElse("")}""")
+ }
+
+ /** Validate the classfile header: the 0xCAFEBABE magic number and a
+  * major.minor version of at least JAVA_MAJOR_VERSION.JAVA_MINOR_VERSION.
+  * Throws IOException on either mismatch.
+  */
+ private def parseHeader(): Unit = {
+ val magic = in.nextInt
+ if (magic != JAVA_MAGIC)
+ throw new IOException(s"class file '${in.file}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}")
+ val minorVersion = in.nextChar.toInt
+ val majorVersion = in.nextChar.toInt
+ if ((majorVersion < JAVA_MAJOR_VERSION) ||
+ ((majorVersion == JAVA_MAJOR_VERSION) &&
+ (minorVersion < JAVA_MINOR_VERSION)))
+ throw new IOException(
+ s"class file '${in.file}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
+ }
+
+ /** Return the class symbol of the given name.
+  * Names recorded in the InnerClasses attribute are resolved through that table
+  * (forcing outer classes as needed); all others are looked up on the classpath.
+  */
+ def classNameToSymbol(name: Name)(implicit ctx: Context): Symbol = innerClasses.get(name) match {
+ case Some(entry) => innerClasses.classSymbol(entry.externalName)
+ case None => ctx.requiredClass(name)
+ }
+
+ var sawPrivateConstructor = false
+
+ /** Parse the class body: flags, name check, parents, members and attributes.
+  * If the classfile embeds Scala2/TASTY data, delegates to the unpickler and
+  * returns it; otherwise fills in the infos of classRoot and moduleRoot and
+  * returns None. Java enums are eagerly completed so pattern-match
+  * exhaustivity checks can see all cases.
+  */
+ def parseClass()(implicit ctx: Context): Option[Embedded] = {
+ val jflags = in.nextChar
+ val isAnnotation = hasAnnotation(jflags)
+ val sflags = classTranslation.flags(jflags)
+ val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val nameIdx = in.nextChar
+ currentClassName = pool.getClassName(nameIdx)
+
+ if (currentIsTopLevel) {
+ val c = pool.getClassSymbol(nameIdx)
+ if (c != classRoot.symbol) mismatchError(c)
+ }
+
+ addEnclosingTParams()
+
+ /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
+ * Updates the read pointer of 'in'. */
+ def parseParents: List[Type] = {
+ val superType = if (isAnnotation) { in.nextChar; defn.AnnotationType }
+ else pool.getSuperClass(in.nextChar).typeRef
+ val ifaceCount = in.nextChar
+ var ifaces = for (i <- (0 until ifaceCount).toList) yield pool.getSuperClass(in.nextChar).typeRef
+ // Dotty deviation: was
+ // var ifaces = for (i <- List.range(0 until ifaceCount)) ...
+ // This does not typecheck because the type parameter of List is now lower-bounded by Int | Char.
+ // Consequently, no best implicit for the "Integral" evidence parameter of "range"
+ // is found. If we treat constant subtyping specially, we might be able
+ // to do something there. But in any case, the until should be more efficient.
+
+ if (isAnnotation) ifaces = defn.ClassfileAnnotationType :: ifaces
+ superType :: ifaces
+ }
+
+ // If the classfile carries Scala2/TASTY data there is nothing more to parse here.
+ val result = unpickleOrParseInnerClasses()
+ if (!result.isDefined) {
+ var classInfo: Type = TempClassInfoType(parseParents, instanceScope, classRoot.symbol)
+ // might be reassigned by later parseAttributes
+ val staticInfo = TempClassInfoType(List(), staticScope, moduleRoot.symbol)
+
+ enterOwnInnerClasses
+
+ classRoot.setFlag(sflags)
+ moduleRoot.setFlag(Flags.JavaDefined | Flags.ModuleClassCreationFlags)
+ setPrivateWithin(classRoot, jflags)
+ setPrivateWithin(moduleRoot, jflags)
+ setPrivateWithin(moduleRoot.sourceModule, jflags)
+
+ // Fields first, then methods, per the classfile layout.
+ for (i <- 0 until in.nextChar) parseMember(method = false)
+ for (i <- 0 until in.nextChar) parseMember(method = true)
+ classInfo = parseAttributes(classRoot.symbol, classInfo)
+ if (isAnnotation) addAnnotationConstructor(classInfo)
+
+ val companionClassMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, classRoot, moduleRoot)
+ if (companionClassMethod.exists) companionClassMethod.entered
+ val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, moduleRoot, classRoot)
+ if (companionModuleMethod.exists) companionModuleMethod.entered
+
+ setClassInfo(classRoot, classInfo)
+ setClassInfo(moduleRoot, staticInfo)
+ }
+
+ // eager load java enum definitions for exhaustivity check of pattern match
+ if (isEnum) {
+ instanceScope.toList.map(_.ensureCompleted())
+ staticScope.toList.map(_.ensureCompleted())
+ classRoot.setFlag(Flags.Enum)
+ moduleRoot.setFlag(Flags.Enum)
+ }
+
+ result
+ }
+
+ /** Add type parameters of enclosing classes to `classTParams`.
+  * Walks the owner chain up to the first module class, recording each class's
+  * type parameters under their unexpanded names so signature parsing can
+  * resolve references to them.
+  */
+ def addEnclosingTParams()(implicit ctx: Context): Unit = {
+ var sym = classRoot.owner
+ while (sym.isClass && !(sym is Flags.ModuleClass)) {
+ for (tparam <- sym.typeParams) {
+ classTParams = classTParams.updated(tparam.name.unexpandedName, tparam)
+ }
+ sym = sym.owner
+ }
+ }
+
+ /** Parse one field (method = false) or method (method = true) entry.
+  * Enters a lazily-completed symbol whose coord records the classfile offset,
+  * so memberCompleter can re-read the entry later; the descriptor and
+  * attributes are skipped for now. Private members are dropped entirely
+  * unless they are constructors or -optimise is on.
+  */
+ def parseMember(method: Boolean)(implicit ctx: Context): Unit = {
+ val start = indexCoord(in.bp)
+ val jflags = in.nextChar
+ val sflags =
+ if (method) Flags.Method | methodTranslation.flags(jflags)
+ else fieldTranslation.flags(jflags)
+ val name = pool.getName(in.nextChar)
+ if (!(sflags is Flags.Private) || name == nme.CONSTRUCTOR || ctx.settings.optimise.value) {
+ val member = ctx.newSymbol(
+ getOwner(jflags), name, sflags, memberCompleter, coord = start)
+ getScope(jflags).enter(member)
+ }
+ // skip rest of member for now
+ in.nextChar // info
+ skipAttributes
+ }
+
+ /** Lazy completer for members entered by parseMember: rewinds the reader to the
+  * member's recorded offset, re-parses flags/name/descriptor and attributes,
+  * and fills in the denotation's info. The saved buffer position is restored
+  * afterwards so completion can interleave with other parsing.
+  */
+ val memberCompleter = new LazyType {
+
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val oldbp = in.bp
+ try {
+ in.bp = denot.symbol.coord.toIndex
+ val sym = denot.symbol
+ val jflags = in.nextChar
+ val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val name = pool.getName(in.nextChar)
+ val isConstructor = name eq nme.CONSTRUCTOR
+
+ /** Strip leading outer param from constructor.
+ * Todo: Also strip trailing access tag for private inner constructors?
+ */
+ def stripOuterParamFromConstructor() = innerClasses.get(currentClassName) match {
+ case Some(entry) if !isStatic(entry.jflags) =>
+ val mt @ MethodType(paramnames, paramtypes) = denot.info
+ denot.info = mt.derivedMethodType(paramnames.tail, paramtypes.tail, mt.resultType)
+ case _ =>
+ }
+
+ /** Make return type of constructor be the enclosing class type,
+ * and make constructor type polymorphic in the type parameters of the class
+ */
+ def normalizeConstructorInfo() = {
+ val mt @ MethodType(paramnames, paramtypes) = denot.info
+ val rt = classRoot.typeRef appliedTo (classRoot.typeParams map (_.typeRef))
+ denot.info = mt.derivedMethodType(paramnames, paramtypes, rt)
+ addConstructorTypeParams(denot)
+ }
+
+ denot.info = pool.getType(in.nextChar)
+ // Java enum constants get a singleton constant type wrapping their symbol.
+ if (isEnum) denot.info = ConstantType(Constant(sym))
+ if (isConstructor) stripOuterParamFromConstructor()
+ setPrivateWithin(denot, jflags)
+ denot.info = translateTempPoly(parseAttributes(sym, denot.info))
+ if (isConstructor) normalizeConstructorInfo()
+
+ // ACC_VARARGS: turn the trailing array parameter into a repeated parameter.
+ if ((denot is Flags.Method) && (jflags & JAVA_ACC_VARARGS) != 0)
+ denot.info = arrayToRepeated(denot.info)
+
+ // seal java enums
+ if (isEnum) {
+ val enumClass = sym.owner.linkedClass
+ if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
+ enumClass.addAnnotation(Annotation.makeChild(sym))
+ }
+ } finally {
+ in.bp = oldbp
+ }
+ }
+ }
+
+ /** Map direct references to Object to references to Any
+  * (only before erasure; after erasure Object is kept as-is).
+  */
+ final def objToAny(tp: Type)(implicit ctx: Context) =
+ if (tp.isDirectRef(defn.ObjectClass) && !ctx.phase.erasedTypes) defn.AnyType else tp
+
+ /** Parse a JVM (generic) signature string into a dotc Type.
+  * Handles primitive tags, class types with type arguments and inner-class
+  * selections, array types, method descriptors, type variables ('T'), and
+  * leading '<...>' type-parameter sections. `owner` (a method or class symbol,
+  * or null) receives freshly created type-parameter symbols; for a class owner
+  * the trailing signature is parsed as the list of parents.
+  * The cursor `index` is threaded through all the nested helpers below.
+  */
+ private def sigToType(sig: TermName, owner: Symbol = null)(implicit ctx: Context): Type = {
+ var index = 0
+ val end = sig.length
+ def accept(ch: Char): Unit = {
+ assert(sig(index) == ch, (sig(index), ch))
+ index += 1
+ }
+ // Consume and return the name up to (but not including) the first delimiter.
+ def subName(isDelimiter: Char => Boolean): TermName = {
+ val start = index
+ while (!isDelimiter(sig(index))) { index += 1 }
+ sig.slice(start, index)
+ }
+ // Warning: sigToType contains nested completers which might be forced in a later run!
+ // So local methods need their own ctx parameters.
+ def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean)(implicit ctx: Context): Type = {
+ val tag = sig(index); index += 1
+ (tag: @switch) match {
+ case BYTE_TAG => defn.ByteType
+ case CHAR_TAG => defn.CharType
+ case DOUBLE_TAG => defn.DoubleType
+ case FLOAT_TAG => defn.FloatType
+ case INT_TAG => defn.IntType
+ case LONG_TAG => defn.LongType
+ case SHORT_TAG => defn.ShortType
+ case VOID_TAG => defn.UnitType
+ case BOOL_TAG => defn.BooleanType
+ case 'L' =>
+ // Rebuild the prefix chain for non-static inner classes.
+ def processInner(tp: Type): Type = tp match {
+ case tp: TypeRef if !(tp.symbol.owner is Flags.ModuleClass) =>
+ TypeRef(processInner(tp.prefix.widen), tp.name)
+ case _ =>
+ tp
+ }
+ // Apply any '<...>' type arguments as refinements over the raw class type.
+ def processClassType(tp: Type): Type = tp match {
+ case tp: TypeRef =>
+ if (sig(index) == '<') {
+ accept('<')
+ var tp1: Type = tp
+ var formals = tp.typeParamSymbols
+ while (sig(index) != '>') {
+ sig(index) match {
+ case variance @ ('+' | '-' | '*') =>
+ index += 1
+ val bounds = variance match {
+ case '+' => objToAny(TypeBounds.upper(sig2type(tparams, skiptvs)))
+ case '-' =>
+ val tp = sig2type(tparams, skiptvs)
+ // sig2type seems to return AnyClass regardless of the situation:
+ // we don't want Any as a LOWER bound.
+ if (tp.isDirectRef(defn.AnyClass)) TypeBounds.empty
+ else TypeBounds.lower(tp)
+ case '*' => TypeBounds.empty
+ }
+ tp1 = RefinedType(tp1, formals.head.name, bounds)
+ case _ =>
+ tp1 = RefinedType(tp1, formals.head.name, TypeAlias(sig2type(tparams, skiptvs)))
+ }
+ formals = formals.tail
+ }
+ accept('>')
+ tp1
+ } else tp
+ case tp =>
+ assert(sig(index) != '<', tp)
+ tp
+ }
+
+ val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
+ var tpe = processClassType(processInner(classSym.typeRef))
+ // '.' separates inner-class selections within one 'L...;' reference.
+ while (sig(index) == '.') {
+ accept('.')
+ val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
+ val clazz = tpe.member(name).symbol
+ tpe = processClassType(processInner(clazz.typeRef))
+ }
+ accept(';')
+ tpe
+ case ARRAY_TAG =>
+ while ('0' <= sig(index) && sig(index) <= '9') index += 1
+ var elemtp = sig2type(tparams, skiptvs)
+ // make unbounded Array[T] where T is a type variable into Array[T with Object]
+ // (this is necessary because such arrays have a representation which is incompatible
+ // with arrays of primitive types.
+ // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
+ // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
+ // see also RestrictJavaArraysMap (when compiling java sources directly)
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp.derivesFrom(defn.ObjectClass))) {
+ elemtp = AndType(elemtp, defn.ObjectType)
+ }
+ defn.ArrayOf(elemtp)
+ case '(' =>
+ // we need a method symbol, supplied by the caller via getType(methodSym, ..)
+ val paramtypes = new ListBuffer[Type]()
+ var paramnames = new ListBuffer[TermName]()
+ while (sig(index) != ')') {
+ paramnames += nme.syntheticParamName(paramtypes.length)
+ paramtypes += objToAny(sig2type(tparams, skiptvs))
+ }
+ index += 1
+ val restype = sig2type(tparams, skiptvs)
+ JavaMethodType(paramnames.toList, paramtypes.toList)(_ => restype)
+ case 'T' =>
+ val n = subName(';'.==).toTypeName
+ index += 1
+ //assert(tparams contains n, s"classTparams = $classTParams, tparams = $tparams, key = $n")
+ if (skiptvs) defn.AnyType else tparams(n).typeRef
+ }
+ } // sig2type(tparams, skiptvs)
+
+ // Parse a ':'-separated bound list into an upper TypeBounds (class bound may be empty).
+ def sig2typeBounds(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(implicit ctx: Context): Type = {
+ val ts = new ListBuffer[Type]
+ while (sig(index) == ':') {
+ index += 1
+ if (sig(index) != ':') // guard against empty class bound
+ ts += objToAny(sig2type(tparams, skiptvs))
+ }
+ TypeBounds.upper(((NoType: Type) /: ts)(_ & _) orElse defn.AnyType)
+ }
+
+ var tparams = classTParams
+
+ // Completer for a type-parameter symbol: re-parses its bounds from offset `start`.
+ def typeParamCompleter(start: Int) = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val savedIndex = index
+ try {
+ index = start
+ denot.info =
+ checkNonCyclic( // we need the checkNonCyclic call to insert LazyRefs for F-bounded cycles
+ denot.symbol,
+ sig2typeBounds(tparams, skiptvs = false),
+ reportErrors = false)
+ } finally {
+ index = savedIndex
+ }
+ }
+ }
+
+ val newTParams = new ListBuffer[Symbol]()
+ if (sig(index) == '<') {
+ assert(owner != null)
+ index += 1
+ val start = index
+ while (sig(index) != '>') {
+ val tpname = subName(':'.==).toTypeName
+ val expname = if (owner.isClass) tpname.expandedName(owner) else tpname
+ val s = ctx.newSymbol(
+ owner, expname, owner.typeParamCreationFlags,
+ typeParamCompleter(index), coord = indexCoord(index))
+ if (owner.isClass) owner.asClass.enter(s)
+ tparams = tparams + (tpname -> s)
+ // Skip over the bounds here; the completer above re-parses them on demand.
+ sig2typeBounds(tparams, skiptvs = true)
+ newTParams += s
+ }
+ index += 1
+ }
+ val ownTypeParams = newTParams.toList.asInstanceOf[List[TypeSymbol]]
+ val tpe =
+ if ((owner == null) || !owner.isClass)
+ sig2type(tparams, skiptvs = false)
+ else {
+ // For a class signature the remainder of `sig` is the parent list.
+ classTParams = tparams
+ val parents = new ListBuffer[Type]()
+ while (index < end) {
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
+ }
+ TempClassInfoType(parents.toList, instanceScope, owner)
+ }
+ if (ownTypeParams.isEmpty) tpe else TempPolyType(ownTypeParams, tpe)
+ } // sigToType
+
+ /** Parse one annotation element_value and return it as a tree, or None if it is
+  * malformed or `skip` is set. Note that the Char read after the tag is
+  * overloaded: for most tags it is a constant-pool index, but for ARRAY_TAG it
+  * is the number of array elements.
+  */
+ def parseAnnotArg(skip: Boolean = false)(implicit ctx: Context): Option[Tree] = {
+ val tag = in.nextByte.toChar
+ val index = in.nextChar
+ tag match {
+ case STRING_TAG =>
+ if (skip) None else Some(Literal(Constant(pool.getName(index).toString)))
+ case BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | INT_TAG |
+ LONG_TAG | FLOAT_TAG | DOUBLE_TAG =>
+ if (skip) None else Some(Literal(pool.getConstant(index)))
+ case CLASS_TAG =>
+ if (skip) None else Some(Literal(Constant(pool.getType(index))))
+ case ENUM_TAG =>
+ // Enum constants are resolved to the value in the enum's companion module.
+ val t = pool.getType(index)
+ val n = pool.getName(in.nextChar)
+ val module = t.typeSymbol.companionModule
+ val s = module.info.decls.lookup(n)
+ if (skip) {
+ None
+ } else if (s != NoSymbol) {
+ Some(Literal(Constant(s)))
+ } else {
+ ctx.warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""")
+ None
+ }
+ case ARRAY_TAG =>
+ val arr = new ArrayBuffer[Tree]()
+ var hasError = false
+ for (i <- 0 until index)
+ parseAnnotArg(skip) match {
+ case Some(c) => arr += c
+ case None => hasError = true
+ }
+ if (hasError) None
+ else if (skip) None
+ else {
+ // Element type is the lub of the element types (Object if the array is empty).
+ val elems = arr.toList
+ val elemType =
+ if (elems.isEmpty) defn.ObjectType
+ else ctx.typeComparer.lub(elems.tpes).widen
+ Some(JavaSeqLiteral(elems, TypeTree(elemType)))
+ }
+ case ANNOTATION_TAG =>
+ parseAnnotation(index, skip) map (_.tree)
+ }
+ }
+
+ /** Parse and return a single annotation. If it is malformed,
+  * return None. Resolution of the annotation class is deferred
+  * (Annotation.deferredResolve) so missing optional classes do not fail here;
+  * a FatalError (class not found) is rethrown, all other non-fatal failures
+  * are reported as warnings and swallowed.
+  */
+ def parseAnnotation(attrNameIndex: Char, skip: Boolean = false)(implicit ctx: Context): Option[Annotation] = try {
+ val attrType = pool.getType(attrNameIndex)
+ val nargs = in.nextChar
+ val argbuf = new ListBuffer[Tree]
+ var hasError = false
+ for (i <- 0 until nargs) {
+ val name = pool.getName(in.nextChar)
+ parseAnnotArg(skip) match {
+ case Some(arg) => argbuf += NamedArg(name, arg)
+ case None => hasError = !skip
+ }
+ }
+ if (hasError || skip) None
+ else Some(Annotation.deferredResolve(attrType, argbuf.toList))
+ } catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case NonFatal(ex) =>
+ // We want to be robust when annotations are unavailable, so the very least
+ // we can do is warn the user about the exception
+ // There was a reference to ticket 1135, but that is outdated: a reference to a class not on
+ // the classpath would *not* end up here. A class not found is signaled
+ // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
+ // and that should never be swallowed silently.
+ ctx.warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ if (ctx.debug) ex.printStackTrace()
+
+ None // ignore malformed annotations
+ }
+
+ /** Parse the attribute table of `sym`, returning its possibly-updated type.
+  * A Signature attribute replaces `symtype` with the parsed generic signature;
+  * a ConstantValue attribute replaces it with a ConstantType. Other handled
+  * attributes set flags (Synthetic, Bridge), add annotations (Deprecated,
+  * AnnotationDefault, RuntimeVisibleAnnotations, Exceptions) or un-defer
+  * Java 8+ default methods (Code in an interface). Unknown attributes are
+  * skipped via the recorded length.
+  */
+ def parseAttributes(sym: Symbol, symtype: Type)(implicit ctx: Context): Type = {
+ // JVM booleans are stored as ints; coerce an int constant back to Boolean when needed.
+ def convertTo(c: Constant, pt: Type): Constant = {
+ if (pt == defn.BooleanType && c.tag == IntTag)
+ Constant(c.value != 0)
+ else
+ c convertTo pt
+ }
+ var newType = symtype
+
+ def parseAttribute(): Unit = {
+ val attrName = pool.getName(in.nextChar).toTypeName
+ val attrLen = in.nextInt
+ val end = in.bp + attrLen
+ attrName match {
+ case tpnme.SignatureATTR =>
+ val sig = pool.getExternalName(in.nextChar)
+ newType = sigToType(sig, sym)
+ if (ctx.debug && ctx.verbose)
+ println("" + sym + "; signature = " + sig + " type = " + newType)
+ case tpnme.SyntheticATTR =>
+ sym.setFlag(Flags.SyntheticArtifact)
+ case tpnme.BridgeATTR =>
+ sym.setFlag(Flags.Bridge)
+ case tpnme.DeprecatedATTR =>
+ val msg = Literal(Constant("see corresponding Javadoc for more information."))
+ val since = Literal(Constant(""))
+ sym.addAnnotation(Annotation(defn.DeprecatedAnnot, msg, since))
+ case tpnme.ConstantValueATTR =>
+ val c = pool.getConstant(in.nextChar)
+ val c1 = convertTo(c, symtype)
+ if (c1 ne null) newType = ConstantType(c1)
+ else println("failure to convert " + c + " to " + symtype); //debug
+ case tpnme.AnnotationDefaultATTR =>
+ sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil))
+ // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
+ case tpnme.RuntimeAnnotationATTR =>
+ parseAnnotations(attrLen)
+
+ // TODO 1: parse runtime visible annotations on parameters
+ // case tpnme.RuntimeParamAnnotationATTR
+
+ // TODO 2: also parse RuntimeInvisibleAnnotation / RuntimeInvisibleParamAnnotation,
+ // i.e. java annotations with RetentionPolicy.CLASS?
+
+ case tpnme.ExceptionsATTR =>
+ parseExceptions(attrLen)
+
+ case tpnme.CodeATTR =>
+ // A concrete body inside an interface means a Java 8+ default method.
+ if (sym.owner is Flags.JavaTrait) {
+ sym.resetFlag(Flags.Deferred)
+ sym.owner.resetFlag(Flags.PureInterface)
+ ctx.log(s"$sym in ${sym.owner} is a java8+ default method.")
+ }
+ in.skip(attrLen)
+
+ case _ =>
+ }
+ // Always reposition past the attribute, whatever was (or wasn't) consumed.
+ in.bp = end
+ }
+
+ /**
+ * Parse the "Exceptions" attribute which denotes the exceptions
+ * thrown by a method.
+ */
+ def parseExceptions(len: Int): Unit = {
+ val nClasses = in.nextChar
+ for (n <- 0 until nClasses) {
+ // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
+ val cls = pool.getClassSymbol(in.nextChar.toInt)
+ sym.addAnnotation(ThrowsAnnotation(cls.asClass))
+ }
+ }
+
+ /** Parse a sequence of annotations and attaches them to the
+ * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
+ def parseAnnotations(len: Int): Unit = {
+ val nAttr = in.nextChar
+ for (n <- 0 until nAttr)
+ parseAnnotation(in.nextChar) match {
+ case Some(annot) =>
+ sym.addAnnotation(annot)
+ case None =>
+ }
+ }
+
+ // begin parseAttributes
+ for (i <- 0 until in.nextChar) {
+ parseAttribute()
+ }
+ newType
+ }
+
+ /** Add synthetic constructor(s) and potentially also default getters which
+  * reflects the fields of the annotation with given `classInfo`.
+  * Annotations in Scala are assumed to get all their arguments as constructor
+  * parameters. For Java annotations we need to fake it by making up the constructor.
+  * Note that default getters have type Nothing. That's OK because we need
+  * them only to signal that the corresponding parameter is optional.
+  */
+ def addAnnotationConstructor(classInfo: Type, tparams: List[TypeSymbol] = Nil)(implicit ctx: Context): Unit = {
+ // Default getter entered on the companion module, typed Nothing (see doc comment above).
+ def addDefaultGetter(attr: Symbol, n: Int) =
+ ctx.newSymbol(
+ owner = moduleRoot.symbol,
+ name = nme.CONSTRUCTOR.defaultGetterName(n),
+ flags = attr.flags & Flags.AccessFlags,
+ info = defn.NothingType).entered
+
+ classInfo match {
+ // NOTE(review): the pattern variable `tparams` here shadows the method parameter,
+ // so the guard tests the TempPolyType's own (empty) list and the recursion also
+ // passes that empty list — the `tparams` argument of this method is never consulted
+ // in this branch. Looks like non-empty TempPolyTypes are intentionally (?) not
+ // handled; confirm against later revisions of this file.
+ case classInfo @ TempPolyType(tparams, restpe) if tparams.isEmpty =>
+ addAnnotationConstructor(restpe, tparams)
+ case classInfo: TempClassInfoType =>
+ val attrs = classInfo.decls.toList.filter(_.isTerm)
+ val targs = tparams.map(_.typeRef)
+ val paramNames = attrs.map(_.name.asTermName)
+ val paramTypes = attrs.map(_.info.resultType)
+
+ // Enter one synthetic constructor taking every annotation attribute as a parameter.
+ def addConstr(ptypes: List[Type]) = {
+ val mtype = MethodType(paramNames, ptypes, classRoot.typeRef.appliedTo(targs))
+ val constrType = if (tparams.isEmpty) mtype else TempPolyType(tparams, mtype)
+ val constr = ctx.newSymbol(
+ owner = classRoot.symbol,
+ name = nme.CONSTRUCTOR,
+ flags = Flags.Synthetic,
+ info = constrType
+ ).entered
+ for ((attr, i) <- attrs.zipWithIndex)
+ if (attr.hasAnnotation(defn.AnnotationDefaultAnnot)) {
+ constr.setFlag(Flags.HasDefaultParams)
+ addDefaultGetter(attr, i)
+ }
+ }
+
+ addConstr(paramTypes)
+
+ // The code below added an extra constructor to annotations where the
+ // last parameter of the constructor is an Array[X] for some X, the
+ // array was replaced by a vararg argument. Unfortunately this breaks
+ // inference when doing:
+ // @Annot(Array())
+ // The constructor is overloaded so the expected type of `Array()` is
+ // WildcardType, and the type parameter of the Array apply method gets
+ // instantiated to `Nothing` instead of `X`.
+ // I'm leaving this commented out in case we improve inference to make this work.
+ // Note that if this is reenabled then JavaParser will also need to be modified
+ // to add the extra constructor (this was not implemented before).
+ /*
+ if (paramTypes.nonEmpty)
+ paramTypes.last match {
+ case defn.ArrayOf(elemtp) =>
+ addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
+ case _ =>
+ }
+ */
+ }
+ }
+
+ /** Enter own inner classes in the right scope. It needs the scopes to be set up,
+  * and implicitly current class' superclasses.
+  * Only entries whose outerName equals the class currently being parsed are
+  * entered; each gets a ClassfileLoader for its own classfile (asserting the
+  * file exists on the classpath).
+  */
+ private def enterOwnInnerClasses()(implicit ctx: Context): Unit = {
+ // NOTE(review): this helper appears unused in the visible body — candidate for removal.
+ def className(name: Name): Name = name.drop(name.lastIndexOf('.') + 1)
+
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = {
+ ctx.base.loaders.enterClassAndModule(
+ getOwner(jflags),
+ entry.originalName,
+ new ClassfileLoader(file),
+ classTranslation.flags(jflags),
+ getScope(jflags))
+ }
+
+ for (entry <- innerClasses.values) {
+ // create a new class member for immediate inner classes
+ if (entry.outerName == currentClassName) {
+ val file = ctx.platform.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+ throw new AssertionError(entry.externalName)
+ }
+ enterClassAndModule(entry, file, entry.jflags)
+ }
+ }
+ }
+
+ /** Parse inner classes. Expects `in.bp` to point to the superclass entry.
+  * Restores the old `bp`.
+  * @return true iff classfile is from Scala, so no Java info needs to be read.
+  * Concretely: skips ahead to the class attribute table, then
+  *  - if a TASTY attribute is present, unpickles it and returns the unpickler;
+  *  - else if a RuntimeVisibleAnnotations attribute carries a ScalaSignature /
+  *    ScalaLongSignature / TASTYSignature / TASTYLongSignature annotation,
+  *    unpickles its `bytes` payload and returns that unpickler;
+  *  - else records every complete InnerClasses entry in `innerClasses` and
+  *    returns None.
+  */
+ def unpickleOrParseInnerClasses()(implicit ctx: Context): Option[Embedded] = {
+ val oldbp = in.bp
+ try {
+ skipSuperclasses()
+ skipMembers() // fields
+ skipMembers() // methods
+ val attrs = in.nextChar
+ val attrbp = in.bp
+
+ // Position `in` on the named attribute (just past its name); false if absent.
+ def scan(target: TypeName): Boolean = {
+ in.bp = attrbp
+ var i = 0
+ while (i < attrs && pool.getName(in.nextChar).toTypeName != target) {
+ val attrLen = in.nextInt
+ in.skip(attrLen)
+ i += 1
+ }
+ i < attrs
+ }
+
+ def unpickleScala(bytes: Array[Byte]): Some[Embedded] = {
+ val unpickler = new unpickleScala2.Scala2Unpickler(bytes, classRoot, moduleRoot)(ctx)
+ unpickler.run()(ctx.addMode(Mode.Scala2Unpickling))
+ Some(unpickler)
+ }
+
+ def unpickleTASTY(bytes: Array[Byte]): Some[Embedded] = {
+ val unpickler = new tasty.DottyUnpickler(bytes)
+ unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))
+ Some(unpickler)
+ }
+
+ // ScalaSignature stores its payload as one string constant.
+ def parseScalaSigBytes: Array[Byte] = {
+ val tag = in.nextByte.toChar
+ assert(tag == STRING_TAG, tag)
+ pool getBytes in.nextChar
+ }
+
+ // ScalaLongSignature stores its payload as an array of string constants.
+ def parseScalaLongSigBytes: Array[Byte] = {
+ val tag = in.nextByte.toChar
+ assert(tag == ARRAY_TAG, tag)
+ val stringCount = in.nextChar
+ val entries =
+ for (i <- 0 until stringCount) yield {
+ val stag = in.nextByte.toChar
+ assert(stag == STRING_TAG, stag)
+ in.nextChar.toInt
+ }
+ pool.getBytes(entries.toList)
+ }
+
+ if (scan(tpnme.TASTYATTR)) {
+ val attrLen = in.nextInt
+ return unpickleTASTY(in.nextBytes(attrLen))
+ }
+
+ if (scan(tpnme.RuntimeAnnotationATTR)) {
+ val attrLen = in.nextInt
+ val nAnnots = in.nextChar
+ var i = 0
+ while (i < nAnnots) {
+ val attrClass = pool.getType(in.nextChar).typeSymbol
+ val nArgs = in.nextChar
+ var j = 0
+ while (j < nArgs) {
+ val argName = pool.getName(in.nextChar)
+ if (argName == nme.bytes)
+ if (attrClass == defn.ScalaSignatureAnnot)
+ return unpickleScala(parseScalaSigBytes)
+ else if (attrClass == defn.ScalaLongSignatureAnnot)
+ return unpickleScala(parseScalaLongSigBytes)
+ else if (attrClass == defn.TASTYSignatureAnnot)
+ return unpickleTASTY(parseScalaSigBytes)
+ else if (attrClass == defn.TASTYLongSignatureAnnot)
+ return unpickleTASTY(parseScalaLongSigBytes)
+ // Not a signature payload: skip over the argument value.
+ parseAnnotArg(skip = true)
+ j += 1
+ }
+ i += 1
+ }
+ }
+
+ if (scan(tpnme.InnerClassesATTR)) {
+ val attrLen = in.nextInt
+ val entries = in.nextChar.toInt
+ for (i <- 0 until entries) {
+ val innerIndex = in.nextChar
+ val outerIndex = in.nextChar
+ val nameIndex = in.nextChar
+ val jflags = in.nextChar
+ // Entries with any zero index describe anonymous/local classes; skip those.
+ if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) {
+ val entry = InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
+ innerClasses(pool.getClassName(innerIndex)) = entry
+ }
+ }
+ }
+ None
+ } finally in.bp = oldbp
+ }
+
+ /** An entry in the InnerClasses attribute of this class file.
+  * The three Int fields are constant-pool indices; the accessors below resolve
+  * them lazily through the pool.
+  */
+ case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
+ def externalName = pool.getClassName(external)
+ def outerName = pool.getClassName(outer)
+ def originalName = pool.getName(name)
+
+ override def toString =
+ originalName + " in " + outerName + "(" + externalName + ")"
+ }
+
+ /** Table of InnerClasses entries keyed by the inner class's external (binary) name. */
+ object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
+ /** Return the Symbol of the top level class enclosing `name`,
+  * or 'name's symbol if no entry found for `name`.
+  */
+ def topLevelClass(name: Name)(implicit ctx: Context): Symbol = {
+ val tlName = if (isDefinedAt(name)) {
+ var entry = this(name)
+ while (isDefinedAt(entry.outerName))
+ entry = this(entry.outerName)
+ entry.outerName
+ } else
+ name
+ classNameToSymbol(tlName)
+ }
+
+ /** Return the class symbol for `externalName`. It looks it up in its outer class.
+  * Forces all outer class symbols to be completed.
+  *
+  * If the given name is not an inner class, it returns the symbol found in `defn`.
+  */
+ def classSymbol(externalName: Name)(implicit ctx: Context): Symbol = {
+ /** Return the symbol of `innerName`, having the given `externalName`. */
+ def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
+ // For the class currently being parsed, look in our own scopes;
+ // otherwise go through the (possibly completed) owner's members.
+ def getMember(sym: Symbol, name: Name): Symbol =
+ if (static)
+ if (sym == classRoot.symbol) staticScope.lookup(name)
+ else sym.companionModule.info.member(name).symbol
+ else
+ if (sym == classRoot.symbol) instanceScope.lookup(name)
+ else sym.info.member(name).symbol
+
+ innerClasses.get(externalName) match {
+ case Some(entry) =>
+ // Recurse outward first so all enclosing classes are resolved/completed.
+ val outerName = entry.outerName.stripModuleClassSuffix
+ val owner = classSymbol(outerName)
+ val result = ctx.atPhaseNotLaterThanTyper { implicit ctx =>
+ getMember(owner, innerName.toTypeName)
+ }
+ assert(result ne NoSymbol,
+ i"""failure to resolve inner class:
+ |externalName = $externalName,
+ |outerName = $outerName,
+ |innerName = $innerName
+ |owner.fullName = ${owner.showFullName}
+ |while parsing ${classfile}""")
+ result
+
+ case None =>
+ classNameToSymbol(externalName)
+ }
+ }
+
+ get(externalName) match {
+ case Some(entry) =>
+ innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
+ case None =>
+ classNameToSymbol(externalName)
+ }
+ }
+ }
+
+ /** Skip an attribute table: for each attribute, 2 bytes of name index plus its length-prefixed body. */
+ def skipAttributes(): Unit = {
+ val attrCount = in.nextChar
+ for (i <- 0 until attrCount) {
+ in.skip(2); in.skip(in.nextInt)
+ }
+ }
+
+ /** Skip a field or method table: 6 bytes (flags, name, descriptor) plus attributes per member. */
+ def skipMembers(): Unit = {
+ val memberCount = in.nextChar
+ for (i <- 0 until memberCount) {
+ in.skip(6); skipAttributes()
+ }
+ }
+
+ /** Skip the superclass entry and the interface list. */
+ def skipSuperclasses(): Unit = {
+ in.skip(2) // superclass
+ val ifaces = in.nextChar
+ in.skip(2 * ifaces)
+ }
+
+ // Static members belong to the module root, instance members to the class root.
+ protected def getOwner(flags: Int): Symbol =
+ if (isStatic(flags)) moduleRoot.symbol else classRoot.symbol
+
+ protected def getScope(flags: Int): MutableScope =
+ if (isStatic(flags)) staticScope else instanceScope
+
+ // Java package-private (neither ACC_PRIVATE nor ACC_PUBLIC) maps to private[enclosingPackage].
+ private def setPrivateWithin(denot: SymDenotation, jflags: Int)(implicit ctx: Context): Unit = {
+ if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PUBLIC)) == 0)
+ denot.privateWithin = denot.enclosingPackageClass
+ }
+
+ private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
+ private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
+ private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+
+ /** Lazily-decoded view of the classfile's constant pool. The initializer below
+  * scans the pool once, recording the byte offset of each entry in `starts`;
+  * `values`/`internalized` cache decoded entries on first access.
+  */
+ class ConstantPool {
+ private val len = in.nextChar
+ private val starts = new Array[Int](len)
+ private val values = new Array[AnyRef](len)
+ private val internalized = new Array[TermName](len)
+
+ // Index 0 is unused by the JVM spec, so scanning starts at 1.
+ { var i = 1
+ while (i < starts.length) {
+ starts(i) = in.bp
+ i += 1
+ (in.nextByte.toInt: @switch) match {
+ case CONSTANT_UTF8 | CONSTANT_UNICODE =>
+ in.skip(in.nextChar)
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE =>
+ in.skip(2)
+ case CONSTANT_METHODHANDLE =>
+ in.skip(3)
+ case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
+ | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
+ | CONSTANT_INVOKEDYNAMIC =>
+ in.skip(4)
+ case CONSTANT_LONG | CONSTANT_DOUBLE =>
+ in.skip(8)
+ // long and double constants occupy two pool slots (JVM spec).
+ i += 1
+ case _ =>
+ errorBadTag(in.bp - 1)
+ }
+ }
+ }
+
+ /** Return the name found at given index. Decoded CONSTANT_Utf8 entries are
+  * cached in `values`.
+  */
+ def getName(index: Int): TermName = {
+ if (index <= 0 || len <= index)
+ errorBadIndex(index)
+
+ values(index) match {
+ case name: TermName => name
+ case null =>
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ // +3 skips the tag byte and the 2-byte length stored at start+1.
+ val name = termName(in.buf, start + 3, in.getChar(start + 1))
+ values(index) = name
+ name
+ }
+ }
+
+ /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
+ def getExternalName(index: Int): TermName = {
+ if (index <= 0 || len <= index)
+ errorBadIndex(index)
+
+ if (internalized(index) == null)
+ internalized(index) = getName(index).replace('/', '.')
+
+ internalized(index)
+ }
+
+ /** Return the symbol for the CONSTANT_Class entry at `index`, caching the result.
+  * Module-class names resolve to the module symbol, except for Null$ / Nothing$.
+  */
+ def getClassSymbol(index: Int)(implicit ctx: Context): Symbol = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var c = values(index).asInstanceOf[Symbol]
+ if (c eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
+ val name = getExternalName(in.getChar(start + 1))
+ if (name.isModuleClassName && (name ne nme.nothingRuntimeClass) && (name ne nme.nullRuntimeClass))
+ // Null$ and Nothing$ ARE classes
+ c = ctx.requiredModule(name.sourceModuleName)
+ else c = classNameToSymbol(name)
+ values(index) = c
+ }
+ c
+ }
+
+ /** Return the external name of the class info structure found at 'index'.
+ * Use 'getClassSymbol' if the class is sure to be a top-level class.
+ */
+ def getClassName(index: Int): TermName = {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
+ getExternalName(in.getChar(start + 1))
+ }
+
+ /** Return a name and a type at the given index.
+ */
+ private def getNameAndType(index: Int, ownerTpe: Type)(implicit ctx: Context): (Name, Type) = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var p = values(index).asInstanceOf[(Name, Type)]
+ if (p eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
+ val name = getName(in.getChar(start + 1).toInt)
+ var tpe = getType(in.getChar(start + 3).toInt)
+ // fix the return type, which is blindly set to the class currently parsed
+ if (name == nme.CONSTRUCTOR)
+ tpe match {
+ case tp: MethodType =>
+ tp.derivedMethodType(tp.paramNames, tp.paramTypes, ownerTpe)
+ }
+ p = (name, tpe)
+ values(index) = p
+ }
+ p
+ }
+
+ /** Return the type of a class constant entry. Since
+ * arrays are considered to be class types, they might
+ * appear as entries in 'newarray' or 'cast' opcodes.
+ */
+ def getClassOrArrayType(index: Int)(implicit ctx: Context): Type = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ val value = values(index)
+ var c: Type = null
+ if (value eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
+ val name = getExternalName(in.getChar(start + 1))
+ if (name(0) == ARRAY_TAG) {
+ c = sigToType(name)
+ values(index) = c
+ } else {
+ val sym = classNameToSymbol(name)
+ values(index) = sym
+ c = sym.typeRef
+ }
+ } else c = value match {
+ case tp: Type => tp
+ case cls: Symbol => cls.typeRef
+ }
+ c
+ }
+
+ def getType(index: Int)(implicit ctx: Context): Type =
+ sigToType(getExternalName(index))
+
+ def getSuperClass(index: Int)(implicit ctx: Context): Symbol = {
+ assert(index != 0, "attempt to parse java.lang.Object from classfile")
+ getClassSymbol(index)
+ }
+
+ def getConstant(index: Int)(implicit ctx: Context): Constant = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var value = values(index)
+ if (value eq null) {
+ val start = starts(index)
+ value = (in.buf(start).toInt: @switch) match {
+ case CONSTANT_STRING =>
+ Constant(getName(in.getChar(start + 1).toInt).toString)
+ case CONSTANT_INTEGER =>
+ Constant(in.getInt(start + 1))
+ case CONSTANT_FLOAT =>
+ Constant(in.getFloat(start + 1))
+ case CONSTANT_LONG =>
+ Constant(in.getLong(start + 1))
+ case CONSTANT_DOUBLE =>
+ Constant(in.getDouble(start + 1))
+ case CONSTANT_CLASS =>
+ getClassOrArrayType(index).typeSymbol
+ case _ =>
+ errorBadTag(start)
+ }
+ values(index) = value
+ }
+ value match {
+ case ct: Constant => ct
+ case cls: Symbol => Constant(cls.typeRef)
+ case arr: Type => Constant(arr)
+ }
+ }
+
+ private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
+ val decodedLength = ByteCodecs.decode(bytes)
+ val arr = new Array[Byte](decodedLength)
+ System.arraycopy(bytes, 0, arr, 0, decodedLength)
+ arr
+ }
+
+ def getBytes(index: Int): Array[Byte] = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var value = values(index).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ val bytes = new Array[Byte](len)
+ System.arraycopy(in.buf, start + 3, bytes, 0, len)
+ value = getSubArray(bytes)
+ values(index) = value
+ }
+ value
+ }
+
+ def getBytes(indices: List[Int]): Array[Byte] = {
+ assert(!indices.isEmpty, indices)
+ var value = values(indices.head).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val bytesBuffer = ArrayBuffer.empty[Byte]
+ for (index <- indices) {
+ if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
+ }
+ value = getSubArray(bytesBuffer.toArray)
+ values(indices.head) = value
+ }
+ value
+ }
+
+ /** Throws an exception signaling a bad constant index. */
+ private def errorBadIndex(index: Int) =
+ throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+
+ /** Throws an exception signaling a bad tag at given address. */
+ private def errorBadTag(start: Int) =
+ throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
+ }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
new file mode 100644
index 000000000..2c93819d5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -0,0 +1,53 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, SymDenotations._, Symbols._
+import dotty.tools.dotc.ast.tpd
+import TastyUnpickler._, TastyBuffer._
+import util.Positions._
+import util.{SourceFile, NoSource}
+import Annotations.Annotation
+import core.Mode
+import classfile.ClassfileParser
+
+object DottyUnpickler {
+
+ /** Exception thrown if classfile is corrupted */
+ class BadSignature(msg: String) extends RuntimeException(msg)
+
+ /** Unpickler for the "ASTs" section; forwards `posUnpickler` (if present)
+  *  so the tree unpickler can attach positions. */
+ class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler])
+ extends SectionUnpickler[TreeUnpickler]("ASTs") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new TreeUnpickler(reader, tastyName, posUnpickler)
+ }
+
+ /** Unpickler for the "Positions" section. */
+ class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new PositionUnpickler(reader)
+ }
+}
+
+/** A class for unpickling Tasty trees and symbols.
+ *  Construction reads the Positions section first (it may be absent), then
+ *  the mandatory ASTs section.
+ *  @param bytes the bytearray containing the Tasty file from which we unpickle
+ */
+class DottyUnpickler(bytes: Array[Byte]) extends ClassfileParser.Embedded {
+ import tpd._
+ import DottyUnpickler._
+
+ val unpickler = new TastyUnpickler(bytes)
+ private val positionUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler)
+ private val treeUnpickler =
+ unpickler.unpickle(new TreeSectionUnpickler(positionUnpicklerOpt)).get
+
+ /** Enter all toplevel classes and objects into their scopes
+ *  @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
+ treeUnpickler.enterTopLevel(roots)
+
+ /** The unpickled trees, and the source file they come from. */
+ def body(implicit ctx: Context): List[Tree] = treeUnpickler.unpickle()
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
new file mode 100644
index 000000000..3ff7298ce
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -0,0 +1,101 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import collection.mutable
+import Names.{Name, chrs}
+import Decorators._, NameOps._
+import TastyBuffer._
+import scala.io.Codec
+import TastyName._
+import TastyFormat._
+
+/** Buffer building the TASTY name table: assigns each distinct TastyName an
+ *  ordinal on first use and pickles all names, in ordinal order, on assemble().
+ */
+class NameBuffer extends TastyBuffer(10000) {
+ import NameBuffer._
+
+ // LinkedHashMap so that iteration order in `assemble` matches the assigned
+ // ordinals (asserted there).
+ private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef]
+
+ /** The ordinal of `name` in the name table, registering it if not yet present. */
+ def nameIndex(name: TastyName): NameRef = nameRefs.get(name) match {
+ case Some(ref) =>
+ ref
+ case None =>
+ val ref = NameRef(nameRefs.size)
+ nameRefs(name) = ref
+ ref
+ }
+ /** The ordinal of `name`, unfolding shadowed names into Shadowed entries. */
+ def nameIndex(name: Name): NameRef = {
+ val tname =
+ if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed))
+ else Simple(name.toTermName)
+ nameIndex(tname)
+ }
+
+ def nameIndex(str: String): NameRef = nameIndex(str.toTermName)
+
+ /** The ordinal of a dotted name, pickled as nested Qualified entries. */
+ def fullNameIndex(name: Name): NameRef = {
+ val pos = name.lastIndexOf('.')
+ if (pos > 0)
+ nameIndex(Qualified(fullNameIndex(name.take(pos)), nameIndex(name.drop(pos + 1))))
+ else
+ nameIndex(name)
+ }
+
+ /** Write the output of `op`, prefixed with its length in a `lengthWidth`-byte
+  *  Nat field. Bug fix: the length was previously computed as total bytes
+  *  minus 1 regardless of `lengthWidth`, which over-reported the payload
+  *  length by one byte whenever a 2-byte length field was used (the wide
+  *  SIGNED case); we must subtract the width of the length field itself.
+  */
+ private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = {
+ val lengthAddr = currentAddr
+ for (i <- 0 until lengthWidth) writeByte(0)
+ op
+ val length = currentAddr.index - lengthAddr.index - lengthWidth
+ putNat(lengthAddr, length, lengthWidth)
+ }
+
+ def writeNameRef(ref: NameRef) = writeNat(ref.index)
+
+ /** Pickle a single name entry following TastyFormat's Name grammar. */
+ def pickleName(name: TastyName): Unit = name match {
+ case Simple(name) =>
+ val bytes =
+ if (name.length == 0) new Array[Byte](0)
+ else Codec.toUTF8(chrs, name.start, name.length)
+ writeByte(UTF8)
+ writeNat(bytes.length)
+ writeBytes(bytes, bytes.length)
+ case Qualified(qualified, selector) =>
+ writeByte(QUALIFIED)
+ withLength { writeNameRef(qualified); writeNameRef(selector) }
+ case Signed(original, params, result) =>
+ writeByte(SIGNED)
+ // Use a 2-byte length field when the worst-case payload (each ref up to
+ // maxIndexWidth bytes) might not fit in a single length byte.
+ withLength(
+ { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) },
+ if ((params.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2)
+ case Expanded(prefix, original) =>
+ writeByte(EXPANDED)
+ withLength { writeNameRef(prefix); writeNameRef(original) }
+ case ModuleClass(module) =>
+ writeByte(OBJECTCLASS)
+ withLength { writeNameRef(module) }
+ case SuperAccessor(accessed) =>
+ writeByte(SUPERACCESSOR)
+ withLength { writeNameRef(accessed) }
+ case DefaultGetter(method, paramNumber) =>
+ writeByte(DEFAULTGETTER)
+ withLength { writeNameRef(method); writeNat(paramNumber) }
+ case Shadowed(original) =>
+ writeByte(SHADOWED)
+ withLength { writeNameRef(original) }
+ }
+
+ /** Pickle all registered names, checking they appear in ordinal order. */
+ override def assemble(): Unit = {
+ var i = 0
+ for ((name, ref) <- nameRefs) {
+ assert(ref.index == i)
+ i += 1
+ pickleName(name)
+ }
+ }
+}
+
+object NameBuffer {
+ // Constants used by pickleName's SIGNED case to decide whether a 1-byte
+ // length field can hold the worst-case payload size.
+ private val maxIndexWidth = 3 // allows name indices up to 2^21.
+ private val payloadBitsPerByte = 7 // determined by nat encoding in TastyBuffer
+ private val maxNumInByte = (1 << payloadBitsPerByte) - 1
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
new file mode 100644
index 000000000..546894a9e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -0,0 +1,79 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast._
+import ast.Trees._
+import ast.Trees.WithLazyField
+import TastyFormat._
+import core._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._
+import collection.mutable
+import TastyBuffer._
+import util.Positions._
+
+/** Pickler for the "Positions" section: records, for each tree address that
+ *  needs one, its position as deltas relative to the previously recorded entry.
+ */
+class PositionPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Option[Addr]) {
+ val buf = new TastyBuffer(5000)
+ pickler.newSection("Positions", buf)
+ import buf._
+ import ast.tpd._
+
+ // NOTE(review): not used anywhere in this class as visible here —
+ // possibly a leftover from an earlier scheme; confirm before removing.
+ private val remainingAddrs = new java.util.IdentityHashMap[Tree, Iterator[Addr]]
+
+ /** Pack the address delta and three presence bits into one Int:
+  *  (addrDelta << 3) | hasStartDelta << 2 | hasEndDelta << 1 | hasPoint.
+  */
+ def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean) = {
+ def toInt(b: Boolean) = if (b) 1 else 0
+ (addrDelta << 3) | (toInt(hasStartDelta) << 2) | (toInt(hasEndDelta) << 1) | toInt(hasPoint)
+ }
+
+ /** Traverse `roots` and pickle a position entry for every tree whose position
+  *  cannot be reconstructed automatically. Entries are delta-encoded against
+  *  the previously recorded entry, so traversal order is significant.
+  */
+ def picklePositions(roots: List[Tree])(implicit ctx: Context) = {
+ var lastIndex = 0
+ var lastPos = Position(0, 0)
+ // Write one entry: header, then only the deltas whose presence bit is set.
+ def pickleDeltas(index: Int, pos: Position) = {
+ val addrDelta = index - lastIndex
+ val startDelta = pos.start - lastPos.start
+ val endDelta = pos.end - lastPos.end
+ buf.writeInt(header(addrDelta, startDelta != 0, endDelta != 0, !pos.isSynthetic))
+ if (startDelta != 0) buf.writeInt(startDelta)
+ if (endDelta != 0) buf.writeInt(endDelta)
+ if (!pos.isSynthetic) buf.writeInt(pos.pointDelta)
+ lastIndex = index
+ lastPos = pos
+ }
+
+ /** True if x's position cannot be reconstructed automatically from its initialPos
+ */
+ def alwaysNeedsPos(x: Positioned) = x match {
+ case _: WithLazyField[_] // initialPos is inaccurate for trees with lazy field
+ | _: Trees.PackageDef[_] => true // package defs might be split into several Tasty files
+ case _ => false
+ }
+
+ // Generic traversal: trees, their annotations, and nested collections.
+ def traverse(x: Any): Unit = x match {
+ case x: Tree @unchecked =>
+ val pos = if (x.isInstanceOf[MemberDef]) x.pos else x.pos.toSynthetic
+ if (pos.exists && (pos != x.initialPos.toSynthetic || alwaysNeedsPos(x))) {
+ addrOfTree(x) match {
+ case Some(addr) =>
+ //println(i"pickling $x with $pos at $addr")
+ pickleDeltas(addr.index, pos)
+ case _ =>
+ //println(i"no address for $x")
+ }
+ }
+ //else if (x.pos.exists) println(i"skipping $x")
+ x match {
+ case x: MemberDef @unchecked =>
+ for (ann <- x.symbol.annotations) traverse(ann.tree)
+ case _ =>
+ }
+ traverse(x.productIterator)
+ case xs: TraversableOnce[_] =>
+ xs.foreach(traverse)
+ case x: Annotation =>
+ traverse(x.tree)
+ case _ =>
+ }
+ traverse(roots)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
new file mode 100644
index 000000000..cbe213d89
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
@@ -0,0 +1,39 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+
+import util.Positions._
+import collection.mutable
+import TastyBuffer.{Addr, NoAddr}
+
+/** Unpickler for tree positions */
+class PositionUnpickler(reader: TastyReader) {
+ import reader._
+
+ // Lazily decodes the whole "Positions" section into a map from tree address
+ // to Position. Mirrors PositionPickler: each entry is a header Int packing
+ // (addrDelta << 3 | hasStart << 2 | hasEnd << 1 | hasPoint), followed by the
+ // present deltas. Deltas accumulate across entries, so the readInt() calls
+ // must happen in exactly this order.
+ private[tasty] lazy val positions = {
+ val positions = new mutable.HashMap[Addr, Position]
+ var curIndex = 0
+ var curStart = 0
+ var curEnd = 0
+ while (!isAtEnd) {
+ val header = readInt()
+ val addrDelta = header >> 3
+ val hasStart = (header & 4) != 0
+ val hasEnd = (header & 2) != 0
+ val hasPoint = (header & 1) != 0
+ curIndex += addrDelta
+ assert(curIndex >= 0)
+ if (hasStart) curStart += readInt()
+ if (hasEnd) curEnd += readInt()
+ positions(Addr(curIndex)) =
+ if (hasPoint) Position(curStart, curEnd, curStart + readInt())
+ else Position(curStart, curEnd)
+ }
+ positions
+ }
+
+ /** The position recorded for `addr`, or NoPosition if none was recorded. */
+ def posAt(addr: Addr) = positions.getOrElse(addr, NoPosition)
+}
+
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
new file mode 100644
index 000000000..13bc95028
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.dble
+
+object TastyBuffer {
+
+ /** The number of digits of the natural number `nat`, written in base 128 format. */
+ def natSize(nat: Int): Int =
+ if (nat < 128) 1 else natSize(nat >>> 7) + 1
+
+ /** An address pointing to an index in a Tasty buffer's byte array */
+ case class Addr(index: Int) extends AnyVal {
+ def - (delta: Int): Addr = Addr(this.index - delta)
+ def + (delta: Int): Addr = Addr(this.index + delta)
+
+ // NOTE(review): subtracts AddrWidth in addition to `base`, so the result
+ // appears to be relative to the end of the reserved address field at
+ // `base` — confirm against the corresponding reader.
+ def relativeTo(base: Addr): Addr = this - base.index - AddrWidth
+ }
+
+ /** Sentinel meaning "no address". */
+ val NoAddr = Addr(-1)
+
+ /** The maximal number of address bytes.
+ * Since addresses are written as base-128 natural numbers,
+ * the value of 4 gives a maximal array size of 256M.
+ */
+ final val AddrWidth = 4
+}
+import TastyBuffer._
+
+/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format,
+ * and that supports reading and patching addresses represented as natural numbers.
+ */
+class TastyBuffer(initialSize: Int) {
+
+ /** The current byte array, will be expanded as needed */
+ var bytes = new Array[Byte](initialSize)
+
+ /** The number of bytes written */
+ var length = 0
+
+ // -- Output routines --------------------------------------------
+
+ /** Write a byte of data. */
+ def writeByte(b: Int): Unit = {
+ if (length >= bytes.length)
+ bytes = dble(bytes)
+ bytes(length) = b.toByte
+ length += 1
+ }
+
+ /** Write the first `n` bytes of `data`. */
+ def writeBytes(data: Array[Byte], n: Int): Unit = {
+ while (bytes.length < length + n) bytes = dble(bytes)
+ Array.copy(data, 0, bytes, length, n)
+ length += n
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeNat(x: Int): Unit =
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
+
+ /** Write a natural number in 2's complement big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeInt(x: Int): Unit =
+ writeLongInt(x)
+
+ /**
+ * Like writeNat, but for longs. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range Int.MIN_VALUE to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long): Unit = {
+ // writePrefix emits the leading 7-bit digits without the 0x80 stop bit;
+ // the final digit (written below) carries the stop bit.
+ def writePrefix(x: Long): Unit = {
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+
+ /** Like writeInt, but for longs */
+ def writeLongInt(x: Long): Unit = {
+ // The recursion stops once the remaining high bits are pure sign
+ // extension of the next digit's top bit (bit 6 of x), so negative values
+ // are encoded in the minimal number of 2's-complement digits.
+ def writePrefix(x: Long): Unit = {
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+
+ /** Write an uncompressed Long stored in 8 bytes in big endian format */
+ def writeUncompressedLong(x: Long): Unit = {
+ var y = x
+ val bytes = new Array[Byte](8)
+ for (i <- 7 to 0 by -1) {
+ bytes(i) = (y & 0xff).toByte
+ y = y >>> 8
+ }
+ writeBytes(bytes, 8)
+ }
+
+ // -- Address handling --------------------------------------------
+
+ /** Write natural number `x` right-adjusted in a field of `width` bytes
+ * starting with address `at`.
+ */
+ def putNat(at: Addr, x: Int, width: Int): Unit = {
+ var y = x
+ var w = width
+ if(at.index + w >= bytes.length)
+ bytes = dble(bytes)
+ // Fill the field from its last byte backwards; only the last byte carries
+ // the 0x80 stop bit, leading bytes are plain (possibly zero) digits.
+ var digit = y & 0x7f | 0x80
+ while (w > 0) {
+ w -= 1
+ bytes(at.index + w) = digit.toByte
+ y >>>= 7
+ digit = y & 0x7f
+ }
+ assert(y == 0, s"number $x too large to fit in $width bytes")
+ }
+
+ /** The byte at given address */
+ def getByte(at: Addr): Int = bytes(at.index)
+
+ /** The natural number at address `at` */
+ def getNat(at: Addr): Int = getLongNat(at).toInt
+
+ /** The long natural number at address `at` */
+ def getLongNat(at: Addr): Long = {
+ var b = 0L
+ var x = 0L
+ var idx = at.index
+ do {
+ b = bytes(idx)
+ x = (x << 7) | (b & 0x7f)
+ idx += 1
+ } while ((b & 0x80) == 0)
+ x
+ }
+
+ /** The address (represented as a natural number) at address `at` */
+ def getAddr(at: Addr) = Addr(getNat(at))
+
+ /** The smallest address equal to or following `at` which points to a non-zero byte */
+ final def skipZeroes(at: Addr): Addr =
+ if (getByte(at) != 0) at else skipZeroes(at + 1)
+
+ /** The address after the natural number found at address `at`. */
+ final def skipNat(at: Addr): Addr = {
+ // The last digit of a nat carries the 0x80 stop bit.
+ val next = at + 1
+ if ((getByte(at) & 0x80) != 0) next else skipNat(next)
+ }
+
+ /** The address referring to the end of data written so far */
+ def currentAddr: Addr = Addr(length)
+
+ /** Reserve `AddrWidth` bytes to write an address into */
+ def reserveAddr(): Addr = {
+ val result = currentAddr
+ length += AddrWidth
+ result
+ }
+
+ /** Fill reserved space at address `at` with address `target` */
+ def fillAddr(at: Addr, target: Addr) =
+ putNat(at, target.index, AddrWidth)
+
+ /** Write address without leading zeroes */
+ def writeAddr(addr: Addr): Unit = writeNat(addr.index)
+
+ // -- Finalization --------------------------------------------
+
+ /** Hook to be overridden in subclasses.
+ * Perform all actions necessary to assemble the final byte array.
+ * After `assemble` no more output actions to this buffer are permitted.
+ */
+ def assemble(): Unit = ()
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
new file mode 100644
index 000000000..cb1b56c3c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -0,0 +1,553 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+/************************************************************
+Notation:
+
+We use BNF notation. Terminal symbols start with at least two
+consecutive upper case letters. Each terminal is represented as a
+single byte tag. Non-terminals are mixed case. Prefixes of the form
+lower case letter*_ are for explanation of semantic content only, they
+can be dropped without changing the grammar.
+
+Micro-syntax:
+
+ LongInt = Digit* StopDigit // big endian 2's complement, value fits in a Long w/o overflow
+ Int = LongInt // big endian 2's complement, fits in an Int w/o overflow
+ Nat = LongInt // non-negative value, fits in an Int without overflow
+ Digit = 0 | ... | 127
+ StopDigit = 128 | ... | 255 // value = digit - 128
+
+Macro-format:
+
+ File = Header majorVersion_Nat minorVersion_Nat UUID
+ nameTable_Length Name* Section*
+ Header = 0x5CA1AB1F
+ UUID = Byte*16 // random UUID
+
+ Section = NameRef Length Bytes
+ Length = Nat // length of rest of entry in bytes
+
+ Name = UTF8 Length UTF8-CodePoint*
+ QUALIFIED Length qualified_NameRef selector_NameRef
+ SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef*
+ EXPANDED Length original_NameRef
+ OBJECTCLASS Length module_NameRef
+ SUPERACCESSOR Length accessed_NameRef
+ DEFAULTGETTER Length method_NameRef paramNumber_Nat
+ SHADOWED Length original_NameRef
+ MANGLED Length mangle_NameRef name_NameRef
+ ...
+
+ NameRef = Nat // ordinal number of name in name table, starting from 1.
+
+Note: Unqualified names in the name table are strings. The context decides whether a name is
+a type-name or a term-name. The same string can represent both.
+
+Standard-Section: "ASTs" TopLevelStat*
+
+ TopLevelStat = PACKAGE Length Path TopLevelStat*
+ Stat
+
+ Stat = Term
+ VALDEF Length NameRef Type rhs_Term? Modifier*
+ DEFDEF Length NameRef TypeParam* Params* return_Type rhs_Term?
+ Modifier*
+ TYPEDEF Length NameRef (Type | Template) Modifier*
+ IMPORT Length qual_Term Selector*
+ Selector = IMPORTED name_NameRef
+ RENAMED to_NameRef
+
+ // Imports are for scala.meta, they are not used in the backend
+
+ TypeParam = TYPEPARAM Length NameRef Type Modifier*
+ Params = PARAMS Length Param*
+ Param = PARAM Length NameRef Type rhs_Term? Modifier* // rhs_Term is present in the case of an aliased class parameter
+ Template = TEMPLATE Length TypeParam* Param* Parent* Self? Stat* // Stat* always starts with the primary constructor.
+ Parent = Application
+ Type
+ Self = SELFDEF selfName_NameRef selfType_Type
+
+ Term = Path
+ Application
+ IDENT NameRef Type // used when term ident’s type is not a TermRef
+ SELECT possiblySigned_NameRef qual_Term
+ QUALTHIS typeIdent_Tree
+ NEW cls_Type
+ SUPER Length this_Term mixinTypeIdent_Tree?
+ TYPED Length expr_Term ascription_Type
+ NAMEDARG Length paramName_NameRef arg_Term
+ ASSIGN Length lhs_Term rhs_Term
+ BLOCK Length expr_Term Stat*
+ INLINED Length call_Term expr_Term Stat*
+ LAMBDA Length meth_Term target_Type
+ IF Length cond_Term then_Term else_Term
+ MATCH Length sel_Term CaseDef*
+ TRY Length expr_Term CaseDef* finalizer_Term?
+ RETURN Length meth_ASTRef expr_Term?
+ REPEATED Length elem_Type elem_Term*
+ BIND Length boundName_NameRef patType_Type pat_Term
+ ALTERNATIVE Length alt_Term*
+ UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term*
+ IDENTtpt NameRef Type // used for all type idents
+ SELECTtpt NameRef qual_Term
+ SINGLETONtpt Path
+ REFINEDtpt Length underlying_Term refinement_Stat*
+ APPLIEDtpt Length tycon_Term arg_Term*
+ POLYtpt Length TypeParam* body_Term
+ TYPEBOUNDStpt Length low_Term high_Term
+ ANNOTATEDtpt Length underlying_Term fullAnnotation_Term
+ ANDtpt Length left_Term right_Term
+ ORtpt Length left_Term right_Term
+ BYNAMEtpt underlying_Term
+ EMPTYTREE
+ SHARED term_ASTRef
+ Application = APPLY Length fn_Term arg_Term*
+
+ TYPEAPPLY Length fn_Term arg_Type*
+ CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree?
+ ImplicitArg = IMPLICITARG arg_Term
+ ASTRef = Nat // byte position in AST payload
+
+ Path = Constant
+ TERMREFdirect sym_ASTRef
+ TERMREFsymbol sym_ASTRef qual_Type
+ TERMREFpkg fullyQualified_NameRef
+ TERMREF possiblySigned_NameRef qual_Type
+ THIS clsRef_Type
+ RECthis recType_ASTRef
+ SHARED path_ASTRef
+
+ Constant = UNITconst
+ FALSEconst
+ TRUEconst
+ BYTEconst Int
+ SHORTconst Int
+ CHARconst Nat
+ INTconst Int
+ LONGconst LongInt
+ FLOATconst Int
+ DOUBLEconst LongInt
+ STRINGconst NameRef
+ NULLconst
+ CLASSconst Type
+ ENUMconst Path
+
+ Type = Path
+ TYPEREFdirect sym_ASTRef
+ TYPEREFsymbol sym_ASTRef qual_Type
+ TYPEREFpkg fullyQualified_NameRef
+ TYPEREF possiblySigned_NameRef qual_Type
+ RECtype parent_Type
+ SUPERtype Length this_Type underlying_Type
+ REFINEDtype Length underlying_Type refinement_NameRef info_Type
+ APPLIEDtype Length tycon_Type arg_Type*
+ TYPEBOUNDS Length low_Type high_Type
+ TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)?
+ ANNOTATEDtype Length underlying_Type fullAnnotation_Term
+ ANDtype Length left_Type right_Type
+ ORtype Length left_Type right_Type
+ BIND Length boundName_NameRef bounds_Type
+ // for type-variables defined in a type pattern
+ BYNAMEtype underlying_Type
+ POLYtype Length result_Type NamesTypes // variance encoded in front of name: +/-/=
+ METHODtype Length result_Type NamesTypes // needed for refinements
+ PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements
+ SHARED type_ASTRef
+ NamesTypes = NameType*
+ NameType = paramName_NameRef typeOrBounds_ASTRef
+
+ Modifier = PRIVATE
+ INTERNAL // package private
+ PROTECTED
+ PRIVATEqualified qualifier_Type // will be dropped
+ PROTECTEDqualified qualifier_Type // will be dropped
+ ABSTRACT
+ FINAL
+ SEALED
+ CASE
+ IMPLICIT
+ LAZY
+ OVERRIDE
+ INLINE // macro
+ STATIC // mapped to static Java member
+ OBJECT // an object or its class
+ TRAIT // a trait
+ LOCAL // private[this] or protected[this]
+ SYNTHETIC // generated by Scala compiler
+ ARTIFACT // to be tagged Java Synthetic
+ MUTABLE // a var
+ LABEL // method generated as a label
+ FIELDaccessor // getter or setter
+ CASEaccessor // getter for case class param
+ COVARIANT // type param marked “+”
+ CONTRAVARIANT // type param marked “-”
+ SCALA2X // Imported from Scala2.x
+ DEFAULTparameterized // Method with default params
+ INSUPERCALL // defined in the argument of a constructor supercall
+ STABLE // Method that is assumed to be stable
+ Annotation
+ Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term
+
+Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way.
+
+ Category 1 (tags 0-63) : tag
+ Category 2 (tags 64-95) : tag Nat
+ Category 3 (tags 96-111) : tag AST
+ Category 4 (tags 112-127): tag Nat AST
+ Category 5 (tags 128-255): tag Length <payload>
+
+Standard Section: "Positions" Assoc*
+
+ Assoc = Header startOffset_Delta? endOffset_Delta? point_Delta?
+ Header = addr_Delta + // in one Int: difference of address to last recorded node << 3 +
+ hasStartDiff + // one bit indicating whether a start offset delta follows << 2 +
+ hasEndDiff + // one bit indicating whether an end offset delta follows << 1 +
+ hasPoint // one bit indicating whether a point delta follows
+ // Nodes which have the same positions as their parents are omitted.
+ // offset_Deltas give difference of start/end offset wrt to the
+ // same offset in the previously recorded node (or 0 for the first recorded node)
+ Delta = Int // Difference between consecutive offsets,
+
+**************************************************************************************/
+
+object TastyFormat {
+
+ final val header = Array(0x5C, 0xA1, 0xAB, 0x1F)
+ final val MajorVersion = 0
+ final val MinorVersion = 5
+
+ // Name tags
+
+ final val UTF8 = 1
+ final val QUALIFIED = 2
+ final val SIGNED = 3
+ final val EXPANDED = 4
+ final val OBJECTCLASS = 5
+ final val SUPERACCESSOR = 6
+ final val DEFAULTGETTER = 7
+ final val SHADOWED = 8
+
+ // AST tags
+
+ final val UNITconst = 2
+ final val FALSEconst = 3
+ final val TRUEconst = 4
+ final val NULLconst = 5
+ final val PRIVATE = 6
+ final val INTERNAL = 7
+ final val PROTECTED = 8
+ final val ABSTRACT = 9
+ final val FINAL = 10
+ final val SEALED = 11
+ final val CASE = 12
+ final val IMPLICIT = 13
+ final val LAZY = 14
+ final val OVERRIDE = 15
+ final val INLINE = 16
+ final val STATIC = 17
+ final val OBJECT = 18
+ final val TRAIT = 19
+ final val LOCAL = 20
+ final val SYNTHETIC = 21
+ final val ARTIFACT = 22
+ final val MUTABLE = 23
+ final val LABEL = 24
+ final val FIELDaccessor = 25
+ final val CASEaccessor = 26
+ final val COVARIANT = 27
+ final val CONTRAVARIANT = 28
+ final val SCALA2X = 29
+ final val DEFAULTparameterized = 30
+ final val INSUPERCALL = 31
+ final val STABLE = 32
+
+ final val SHARED = 64
+ final val TERMREFdirect = 65
+ final val TYPEREFdirect = 66
+ final val TERMREFpkg = 67
+ final val TYPEREFpkg = 68
+ final val RECthis = 69
+ final val BYTEconst = 70
+ final val SHORTconst = 71
+ final val CHARconst = 72
+ final val INTconst = 73
+ final val LONGconst = 74
+ final val FLOATconst = 75
+ final val DOUBLEconst = 76
+ final val STRINGconst = 77
+ final val IMPORTED = 78
+ final val RENAMED = 79
+
+ final val THIS = 96
+ final val QUALTHIS = 97
+ final val CLASSconst = 98
+ final val ENUMconst = 99
+ final val BYNAMEtype = 100
+ final val BYNAMEtpt = 101
+ final val NEW = 102
+ final val IMPLICITarg = 103
+ final val PRIVATEqualified = 104
+ final val PROTECTEDqualified = 105
+ final val RECtype = 106
+ final val SINGLETONtpt = 107
+
+ final val IDENT = 112
+ final val IDENTtpt = 113
+ final val SELECT = 114
+ final val SELECTtpt = 115
+ final val TERMREFsymbol = 116
+ final val TERMREF = 117
+ final val TYPEREFsymbol = 118
+ final val TYPEREF = 119
+ final val SELFDEF = 120
+
+ final val PACKAGE = 128
+ final val VALDEF = 129
+ final val DEFDEF = 130
+ final val TYPEDEF = 131
+ final val IMPORT = 132
+ final val TYPEPARAM = 133
+ final val PARAMS = 134
+ final val PARAM = 136
+ final val APPLY = 137
+ final val TYPEAPPLY = 138
+ final val TYPED = 139
+ final val NAMEDARG = 140
+ final val ASSIGN = 141
+ final val BLOCK = 142
+ final val IF = 143
+ final val LAMBDA = 144
+ final val MATCH = 145
+ final val RETURN = 146
+ final val TRY = 147
+ final val INLINED = 148
+ final val REPEATED = 149
+ final val BIND = 150
+ final val ALTERNATIVE = 151
+ final val UNAPPLY = 152
+ final val ANNOTATEDtype = 153
+ final val ANNOTATEDtpt = 154
+ final val CASEDEF = 155
+ final val TEMPLATE = 156
+ final val SUPER = 157
+ final val SUPERtype = 158
+ final val REFINEDtype = 159
+ final val REFINEDtpt = 160
+ final val APPLIEDtype = 161
+ final val APPLIEDtpt = 162
+ final val TYPEBOUNDS = 163
+ final val TYPEBOUNDStpt = 164
+ final val TYPEALIAS = 165
+ final val ANDtype = 166
+ final val ANDtpt = 167
+ final val ORtype = 168
+ final val ORtpt = 169
+ final val METHODtype = 170
+ final val POLYtype = 171
+ final val POLYtpt = 172
+ final val PARAMtype = 173
+ final val ANNOTATION = 174
+
+ final val firstSimpleTreeTag = UNITconst
+ final val firstNatTreeTag = SHARED
+ final val firstASTTreeTag = THIS
+ final val firstNatASTTreeTag = IDENT
+ final val firstLengthTreeTag = PACKAGE
+
+ def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM
+
+  /** Is `tag` a modifier tag, i.e. one of the flag/annotation markers
+   *  that may follow a definition entry?
+   */
+  def isModifierTag(tag: Int) = tag match {
+    case PRIVATE
+       | INTERNAL
+       | PROTECTED
+       | ABSTRACT
+       | FINAL
+       | SEALED
+       | CASE
+       | IMPLICIT
+       | LAZY
+       | OVERRIDE
+       | INLINE
+       | STATIC
+       | OBJECT
+       | TRAIT
+       | LOCAL
+       | SYNTHETIC
+       | ARTIFACT
+       | MUTABLE
+       | LABEL
+       | FIELDaccessor
+       | CASEaccessor
+       | COVARIANT
+       | CONTRAVARIANT
+       | SCALA2X
+       | DEFAULTparameterized
+       | INSUPERCALL
+       | STABLE
+       | ANNOTATION
+       | PRIVATEqualified
+       | PROTECTEDqualified => true
+    case _ => false
+  }
+
+  /** Is `tag` one of the `...tpt` tags, i.e. a type tree as opposed to a type? */
+  def isTypeTreeTag(tag: Int) = tag match {
+    case IDENTtpt
+       | SELECTtpt
+       | SINGLETONtpt
+       | REFINEDtpt
+       | APPLIEDtpt
+       | POLYtpt
+       | TYPEBOUNDStpt
+       | ANNOTATEDtpt
+       | ANDtpt
+       | ORtpt
+       | BYNAMEtpt => true
+    case _ => false
+  }
+
+  /** The string name of a name-section tag, for debug printing.
+   *  Must cover every tag produced by the name pickler; a missing case
+   *  makes this method throw a MatchError. Previously SHADOWED was
+   *  missing even though the unpickler reads it (see TastyUnpickler.readName).
+   */
+  def nameTagToString(tag: Int): String = tag match {
+    case UTF8 => "UTF8"
+    case QUALIFIED => "QUALIFIED"
+    case SIGNED => "SIGNED"
+    case EXPANDED => "EXPANDED"
+    case OBJECTCLASS => "OBJECTCLASS"
+    case SUPERACCESSOR => "SUPERACCESSOR"
+    case DEFAULTGETTER => "DEFAULTGETTER"
+    case SHADOWED => "SHADOWED"
+  }
+
+  /** The string name of an AST-section tag, for debug printing.
+   *  Grouped like the constant declarations: modifiers/constants first,
+   *  then nat-prefixed entries, then nat-AST entries, then length entries.
+   */
+  def astTagToString(tag: Int): String = tag match {
+    case UNITconst => "UNITconst"
+    case FALSEconst => "FALSEconst"
+    case TRUEconst => "TRUEconst"
+    case NULLconst => "NULLconst"
+    case PRIVATE => "PRIVATE"
+    case INTERNAL => "INTERNAL"
+    case PROTECTED => "PROTECTED"
+    case ABSTRACT => "ABSTRACT"
+    case FINAL => "FINAL"
+    case SEALED => "SEALED"
+    case CASE => "CASE"
+    case IMPLICIT => "IMPLICIT"
+    case LAZY => "LAZY"
+    case OVERRIDE => "OVERRIDE"
+    case INLINE => "INLINE"
+    case STATIC => "STATIC"
+    case OBJECT => "OBJECT"
+    case TRAIT => "TRAIT"
+    case LOCAL => "LOCAL"
+    case SYNTHETIC => "SYNTHETIC"
+    case ARTIFACT => "ARTIFACT"
+    case MUTABLE => "MUTABLE"
+    case LABEL => "LABEL"
+    case FIELDaccessor => "FIELDaccessor"
+    case CASEaccessor => "CASEaccessor"
+    case COVARIANT => "COVARIANT"
+    case CONTRAVARIANT => "CONTRAVARIANT"
+    case SCALA2X => "SCALA2X"
+    case DEFAULTparameterized => "DEFAULTparameterized"
+    case INSUPERCALL => "INSUPERCALL"
+    case STABLE => "STABLE"
+
+    case SHARED => "SHARED"
+    case TERMREFdirect => "TERMREFdirect"
+    case TYPEREFdirect => "TYPEREFdirect"
+    case TERMREFpkg => "TERMREFpkg"
+    case TYPEREFpkg => "TYPEREFpkg"
+    case RECthis => "RECthis"
+    case BYTEconst => "BYTEconst"
+    case SHORTconst => "SHORTconst"
+    case CHARconst => "CHARconst"
+    case INTconst => "INTconst"
+    case LONGconst => "LONGconst"
+    case FLOATconst => "FLOATconst"
+    case DOUBLEconst => "DOUBLEconst"
+    case STRINGconst => "STRINGconst"
+    case RECtype => "RECtype"
+
+    case IDENT => "IDENT"
+    case IDENTtpt => "IDENTtpt"
+    case SELECT => "SELECT"
+    case SELECTtpt => "SELECTtpt"
+    case TERMREFsymbol => "TERMREFsymbol"
+    case TERMREF => "TERMREF"
+    case TYPEREFsymbol => "TYPEREFsymbol"
+    case TYPEREF => "TYPEREF"
+
+    case PACKAGE => "PACKAGE"
+    case VALDEF => "VALDEF"
+    case DEFDEF => "DEFDEF"
+    case TYPEDEF => "TYPEDEF"
+    case IMPORT => "IMPORT"
+    case TYPEPARAM => "TYPEPARAM"
+    case PARAMS => "PARAMS"
+    case PARAM => "PARAM"
+    case IMPORTED => "IMPORTED"
+    case RENAMED => "RENAMED"
+    case APPLY => "APPLY"
+    case TYPEAPPLY => "TYPEAPPLY"
+    case NEW => "NEW"
+    case TYPED => "TYPED"
+    case NAMEDARG => "NAMEDARG"
+    case ASSIGN => "ASSIGN"
+    case BLOCK => "BLOCK"
+    case IF => "IF"
+    case LAMBDA => "LAMBDA"
+    case MATCH => "MATCH"
+    case RETURN => "RETURN"
+    case INLINED => "INLINED"
+    case TRY => "TRY"
+    case REPEATED => "REPEATED"
+    case BIND => "BIND"
+    case ALTERNATIVE => "ALTERNATIVE"
+    case UNAPPLY => "UNAPPLY"
+    case ANNOTATEDtype => "ANNOTATEDtype"
+    case ANNOTATEDtpt => "ANNOTATEDtpt"
+    case CASEDEF => "CASEDEF"
+    case IMPLICITarg => "IMPLICITarg"
+    case TEMPLATE => "TEMPLATE"
+    case SELFDEF => "SELFDEF"
+    case THIS => "THIS"
+    case QUALTHIS => "QUALTHIS"
+    case SUPER => "SUPER"
+    case CLASSconst => "CLASSconst"
+    case ENUMconst => "ENUMconst"
+    case SINGLETONtpt => "SINGLETONtpt"
+    case SUPERtype => "SUPERtype"
+    case REFINEDtype => "REFINEDtype"
+    case REFINEDtpt => "REFINEDtpt"
+    case APPLIEDtype => "APPLIEDtype"
+    case APPLIEDtpt => "APPLIEDtpt"
+    case TYPEBOUNDS => "TYPEBOUNDS"
+    case TYPEBOUNDStpt => "TYPEBOUNDStpt"
+    case TYPEALIAS => "TYPEALIAS"
+    case ANDtype => "ANDtype"
+    case ANDtpt => "ANDtpt"
+    case ORtype => "ORtype"
+    case ORtpt => "ORtpt"
+    case BYNAMEtype => "BYNAMEtype"
+    case BYNAMEtpt => "BYNAMEtpt"
+    case POLYtype => "POLYtype"
+    case POLYtpt => "POLYtpt"
+    case METHODtype => "METHODtype"
+    case PARAMtype => "PARAMtype"
+    case ANNOTATION => "ANNOTATION"
+    case PRIVATEqualified => "PRIVATEqualified"
+    case PROTECTEDqualified => "PROTECTEDqualified"
+  }
+
+  /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry.
+   *          If negative, minus the number of leading non-reference trees.
+   *          (POLYtype/METHODtype start with one result tree before their named parameters, hence -1.)
+   */
+  def numRefs(tag: Int) = tag match {
+    case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | RETURN | BIND |
+         SELFDEF | REFINEDtype => 1
+    case RENAMED | PARAMtype => 2
+    case POLYtype | METHODtype => -1
+    case _ => 0
+  }
+
+  /** Map between variances and name prefixes: '-' contravariant, '=' invariant, '+' covariant.
+   *  The two maps are inverses of each other.
+   */
+  val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+')
+  val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala
new file mode 100644
index 000000000..26807115c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import core.Names.TermName
+import collection.mutable
+
+/** A name as represented in the TASTY name table; concrete shapes are the
+ *  case classes in the companion object. Compound names refer to their
+ *  components by `NameRef` indices into the same table.
+ */
+abstract class TastyName
+
+object TastyName {
+
+  /** An index into the name table, wrapped in a value class to avoid boxing. */
+  case class NameRef(index: Int) extends AnyVal
+
+  case class Simple(name: TermName) extends TastyName
+  case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName
+  case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
+  case class Expanded(prefix: NameRef, original: NameRef) extends TastyName
+  case class ModuleClass(module: NameRef) extends TastyName
+  case class SuperAccessor(accessed: NameRef) extends TastyName
+  case class DefaultGetter(method: NameRef, num: Int) extends TastyName
+  case class Shadowed(original: NameRef) extends TastyName
+
+  /** The name table: names are appended with `add` and looked up by NameRef index. */
+  class Table extends (NameRef => TastyName) {
+    private val names = new mutable.ArrayBuffer[TastyName]
+    def add(name: TastyName) = names += name
+    def apply(ref: NameRef) = names(ref.index)
+    def contents: Iterable[TastyName] = names
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
new file mode 100644
index 000000000..c844d522e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -0,0 +1,71 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyFormat._
+import collection.mutable
+import TastyBuffer._
+import java.util.UUID
+import core.Symbols.Symbol
+import ast.tpd
+
+/** Assembles a complete TASTY file: a fixed header, the name table, and a
+ *  sequence of named sections, each written into its own TastyBuffer.
+ */
+class TastyPickler {
+
+  // Registered sections, each identified by a NameRef into `nameBuffer`.
+  private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)]
+  val uuid = UUID.randomUUID()
+
+  // Header: magic bytes, version nats, and the 16-byte UUID.
+  private val headerBuffer = {
+    val buf = new TastyBuffer(24)
+    for (ch <- header) buf.writeByte(ch.toByte)
+    buf.writeNat(MajorVersion)
+    buf.writeNat(MinorVersion)
+    buf.writeUncompressedLong(uuid.getMostSignificantBits)
+    buf.writeUncompressedLong(uuid.getLeastSignificantBits)
+    buf
+  }
+
+  val nameBuffer = new NameBuffer
+
+  /** Register `buf` as the section called `name`; sections are emitted in registration order. */
+  def newSection(name: String, buf: TastyBuffer) =
+    sections += ((nameBuffer.nameIndex(name), buf))
+
+  /** Concatenate header, name table, and all sections into the final byte array.
+   *  Each variable-length part is preceded by its length as a nat.
+   */
+  def assembleParts(): Array[Byte] = {
+    def lengthWithLength(buf: TastyBuffer) = {
+      buf.assemble()
+      buf.length + natSize(buf.length)
+    }
+    val totalSize =
+      headerBuffer.length +
+      lengthWithLength(nameBuffer) + {
+        for ((nameRef, buf) <- sections) yield
+          natSize(nameRef.index) + lengthWithLength(buf)
+      }.sum
+    val all = new TastyBuffer(totalSize)
+    all.writeBytes(headerBuffer.bytes, headerBuffer.length)
+    all.writeNat(nameBuffer.length)
+    all.writeBytes(nameBuffer.bytes, nameBuffer.length)
+    for ((nameRef, buf) <- sections) {
+      all.writeNat(nameRef.index)
+      all.writeNat(buf.length)
+      all.writeBytes(buf.bytes, buf.length)
+    }
+    assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}")
+    all.bytes
+  }
+
+  /** The address in the TASTY file of a given tree, or None if unknown.
+   *  Note that trees are looked up by reference equality,
+   *  so one can reliably use this function only directly after `pickler`.
+   */
+  var addrOfTree: tpd.Tree => Option[Addr] = (_ => None)
+
+  /** Addresses in the TASTY file of symbols, stored by pickling.
+   *  Note that symbols are looked up by reference equality,
+   *  so one can reliably use this function only directly after `pickler`.
+   */
+  var addrOfSym: Symbol => Option[Addr] = (_ => None)
+
+  val treePkl = new TreePickler(this)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
new file mode 100644
index 000000000..0dc8d8fea
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -0,0 +1,122 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import Contexts._, Decorators._
+import printing.Texts._
+import TastyName._
+import StdNames._
+import TastyUnpickler._
+import TastyBuffer.Addr
+import util.Positions.{Position, offsetToInt}
+import collection.mutable
+
+/** A debug printer that dumps the names, AST entries, and positions of a
+ *  TASTY file to standard output.
+ */
+class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
+
+  val unpickler = new TastyUnpickler(bytes)
+  import unpickler.{tastyName, unpickle}
+
+  /** A human-readable rendering of `name`; compound names are rendered recursively. */
+  def nameToString(name: TastyName): String = name match {
+    case Simple(name) => name.toString
+    case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name)
+    case Signed(original, params, result) =>
+      i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}"
+    case Expanded(prefix, original) => s"$prefix${nme.EXPAND_SEPARATOR}$original"
+    case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS"
+    case SuperAccessor(accessed) => nameRefToString(accessed) + "/SUPERACCESSOR"
+    case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num
+    case Shadowed(original) => nameRefToString(original) + "/SHADOWED"
+  }
+
+  def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref))
+
+  /** Print the whole name table, one name per line with its index. */
+  def printNames() =
+    for ((name, idx) <- tastyName.contents.zipWithIndex)
+      println(f"$idx%4d: " + nameToString(name))
+
+  /** Print names, then the ASTs section, then the Positions section. */
+  def printContents(): Unit = {
+    println("Names:")
+    printNames()
+    println("Trees:")
+    unpickle(new TreeSectionUnpickler)
+    unpickle(new PositionSectionUnpickler)
+  }
+
+  /** Decodes the "ASTs" section entry by entry, using the tag-category
+   *  markers of TastyFormat to decide how each entry is laid out.
+   */
+  class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") {
+    import TastyFormat._
+    def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+      import reader._
+      var indent = 0
+      def newLine() = print(f"\n ${index(currentAddr) - index(startAddr)}%5d:" + " " * indent)
+      def printNat() = print(" " + readNat())
+      def printName() = {
+        val idx = readNat()
+        print(" ") ;print(idx); print("["); print(nameRefToString(NameRef(idx))); print("]")
+      }
+      // Recursively print one entry; children are indented two spaces deeper.
+      def printTree(): Unit = {
+        newLine()
+        val tag = readByte()
+        print(" ");print(astTagToString(tag))
+        indent += 2
+        if (tag >= firstLengthTreeTag) {
+          val len = readNat()
+          print(s"($len)")
+          val end = currentAddr + len
+          def printTrees() = until(end)(printTree())
+          // numRefs(tag) determines how many leading name/nat fields precede the subtrees.
+          tag match {
+            case RENAMED =>
+              printName(); printName()
+            case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
+              printName(); printTrees()
+            case REFINEDtype =>
+              printName(); printTree(); printTrees()
+            case RETURN =>
+              printNat(); printTrees()
+            case METHODtype | POLYtype =>
+              printTree()
+              until(end) { printName(); printTree() }
+            case PARAMtype =>
+              printNat(); printNat()
+            case _ =>
+              printTrees()
+          }
+          if (currentAddr != end) {
+            println(s"incomplete read, current = $currentAddr, end = $end")
+            goto(end)
+          }
+        }
+        else if (tag >= firstNatASTTreeTag) {
+          tag match {
+            case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName()
+            case _ => printNat()
+          }
+          printTree()
+        }
+        else if (tag >= firstASTTreeTag)
+          printTree()
+        else if (tag >= firstNatTreeTag)
+          tag match {
+            case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName()
+            case _ => printNat()
+          }
+        indent -= 2
+      }
+      println(i"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr")
+      println(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr")
+      while (!isAtEnd) {
+        printTree()
+        newLine()
+      }
+    }
+  }
+
+  /** Dumps the "Positions" section, sorted by tree address. */
+  class PositionSectionUnpickler extends SectionUnpickler[Unit]("Positions") {
+    def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+      print(s"${reader.endAddr.index - reader.currentAddr.index}")
+      val positions = new PositionUnpickler(reader).positions
+      println(s" position bytes:")
+      val sorted = positions.toSeq.sortBy(_._1.index)
+      for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}")
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala
new file mode 100644
index 000000000..e583c4793
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala
@@ -0,0 +1,141 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyBuffer._
+import TastyName.NameRef
+import collection.mutable
+
+/** A reader over a section of a TASTY-format byte array. It supports reading
+ *  bytes and natural numbers in TASTY format, and navigating via addresses
+ *  represented as natural numbers.
+ *
+ *  @param bytes The array containing data
+ *  @param start The position from which to read
+ *  @param end   The position one greater than the last byte to be read
+ *  @param base  The index referenced by the logical zero address Addr(0)
+ */
+class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) {
+
+  def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length)
+
+  // Current read position (an absolute index into `bytes`).
+  private var bp: Int = start
+
+  def addr(idx: Int) = Addr(idx - base)
+  def index(addr: Addr) = addr.index + base
+
+  /** The address of the first byte to read, respectively byte that was read */
+  def startAddr: Addr = addr(start)
+
+  /** The address of the next byte to read */
+  def currentAddr: Addr = addr(bp)
+
+  /** The address one greater than the last byte to read */
+  def endAddr: Addr = addr(end)
+
+  /** Have all bytes been read? */
+  def isAtEnd: Boolean = bp == end
+
+  /** A new reader over the same array with the same address base, but with
+   *  specified start and end positions
+   */
+  def subReader(start: Addr, end: Addr): TastyReader =
+    new TastyReader(bytes, index(start), index(end), base)
+
+  /** Read a byte of data. */
+  def readByte(): Int = {
+    val result = bytes(bp) & 0xff
+    bp += 1
+    result
+  }
+
+  /** Returns the next byte of data as a natural number without advancing the read position */
+  def nextByte: Int = bytes(bp) & 0xff
+
+  /** Read the next `n` bytes of `data`. */
+  def readBytes(n: Int): Array[Byte] = {
+    val result = new Array[Byte](n)
+    Array.copy(bytes, bp, result, 0, n)
+    bp += n
+    result
+  }
+
+  /** Read a natural number fitting in an Int in big endian format, base 128.
+   *  The last digit has bit 0x80 set (the stop bit); all preceding digits have it unset.
+   */
+  def readNat(): Int = readLongNat.toInt
+
+  /** Read an integer number in 2's complement big endian format, base 128.
+   *  The last digit has bit 0x80 set (the stop bit); all preceding digits have it unset.
+   */
+  def readInt(): Int = readLongInt.toInt
+
+  /** Read a natural number fitting in a Long in big endian format, base 128.
+   *  The last digit has bit 0x80 set (the stop bit); all preceding digits have it unset.
+   */
+  def readLongNat(): Long = {
+    var b = 0L
+    var x = 0L
+    do {
+      b = bytes(bp)
+      x = (x << 7) | (b & 0x7f)
+      bp += 1
+    } while ((b & 0x80) == 0)
+    x
+  }
+
+  /** Read a long integer number in 2's complement big endian format, base 128.
+   *  Terminated by the stop bit, like readLongNat.
+   */
+  def readLongInt(): Long = {
+    var b = bytes(bp)
+    var x: Long = (b << 1).toByte >> 1 // sign extend with bit 6.
+    bp += 1
+    while ((b & 0x80) == 0) {
+      b = bytes(bp)
+      x = (x << 7) | (b & 0x7f)
+      bp += 1
+    }
+    x
+  }
+
+  /** Read an uncompressed Long stored in 8 bytes in big endian format */
+  def readUncompressedLong(): Long = {
+    var x: Long = 0
+    for (i <- 0 to 7)
+      x = (x << 8) | (readByte() & 0xff)
+    x
+  }
+
+  /** Read a natural number and return as a NameRef */
+  def readNameRef() = NameRef(readNat())
+
+  /** Read a natural number and return as an address */
+  def readAddr() = Addr(readNat())
+
+  /** Read a length number and return the absolute end address implied by it,
+   *  given as <address following length field> + <length-value-read>.
+   */
+  def readEnd(): Addr = addr(readNat() + bp)
+
+  /** Set read position to the one pointed to by `addr` */
+  def goto(addr: Addr): Unit =
+    bp = index(addr)
+
+  /** Perform `op` until `end` address is reached and collect results in a list. */
+  def until[T](end: Addr)(op: => T): List[T] = {
+    val buf = new mutable.ListBuffer[T]
+    while (bp < index(end)) buf += op
+    assert(bp == index(end))
+    buf.toList
+  }
+
+  /** If before given `end` address, the result of `op`, otherwise `default` */
+  def ifBefore[T](end: Addr)(op: => T, default: T): T =
+    if (bp < index(end)) op else default
+
+  /** Perform `op` while condition `cond` holds and collect results in a list. */
+  def collectWhile[T](cond: => Boolean)(op: => T): List[T] = {
+    val buf = new mutable.ListBuffer[T]
+    while (cond) buf += op
+    buf.toList
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
new file mode 100644
index 000000000..8a1f58acd
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
@@ -0,0 +1,95 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import scala.collection.mutable
+import TastyFormat._
+import Names.{Name, termName}
+import java.util.UUID
+
+object TastyUnpickler {
+  /** Thrown when the bytes do not constitute a well-formed TASTY file. */
+  class UnpickleException(msg: String) extends Exception(msg)
+
+  /** A strategy for unpickling one named section of a TASTY file. */
+  abstract class SectionUnpickler[R](val name: String) {
+    def unpickle(reader: TastyReader, tastyName: TastyName.Table): R
+  }
+}
+
+import TastyUnpickler._
+
+/** Parses the outer structure of a TASTY file: header, name table, and the
+ *  table of sections; individual sections are decoded on demand via `unpickle`.
+ */
+class TastyUnpickler(reader: TastyReader) {
+  import reader._
+
+  def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
+
+  // Section name -> reader positioned over that section's bytes.
+  private val sectionReader = new mutable.HashMap[String, TastyReader]
+  val tastyName = new TastyName.Table
+
+  def check(cond: Boolean, msg: => String) =
+    if (!cond) throw new UnpickleException(msg)
+
+  /** Read a name reference and return the underlying simple name as a String.
+   *  Fails with a MatchError if the referenced name is not a Simple name.
+   */
+  def readString(): String = {
+    val TastyName.Simple(name) = tastyName(readNameRef())
+    name.toString
+  }
+
+  /** Read one entry of the name table: a tag byte, a length, and
+   *  tag-specific payload occupying exactly `length` bytes.
+   */
+  def readName(): TastyName = {
+    import TastyName._
+    val tag = readByte()
+    val length = readNat()
+    val start = currentAddr
+    val end = start + length
+    val result = tag match {
+      case UTF8 =>
+        goto(end)
+        Simple(termName(bytes, start.index, length))
+      case QUALIFIED =>
+        Qualified(readNameRef(), readNameRef())
+      case SIGNED =>
+        val original = readNameRef()
+        val result = readNameRef()
+        val params = until(end)(readNameRef())
+        Signed(original, params, result)
+      case EXPANDED =>
+        Expanded(readNameRef(), readNameRef())
+      case OBJECTCLASS =>
+        ModuleClass(readNameRef())
+      case SUPERACCESSOR =>
+        SuperAccessor(readNameRef())
+      case DEFAULTGETTER =>
+        DefaultGetter(readNameRef(), readNat())
+      case SHADOWED =>
+        Shadowed(readNameRef())
+    }
+    assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
+    result
+  }
+
+  /** Verify magic bytes and version compatibility, and return the file's UUID.
+   *  A reader may handle any file with the same major and an equal-or-lower minor version.
+   */
+  private def readHeader(): UUID = {
+    for (i <- 0 until header.length)
+      check(readByte() == header(i), "not a TASTy file")
+    val major = readNat()
+    val minor = readNat()
+    check(major == MajorVersion && minor <= MinorVersion,
+      s"""TASTy signature has wrong version.
+         | expected: $MajorVersion.$MinorVersion
+         | found   : $major.$minor""".stripMargin)
+    new UUID(readUncompressedLong(), readUncompressedLong())
+  }
+
+  val uuid = readHeader()
+
+  // Eagerly read the name table, then record a sub-reader for each section.
+  locally {
+    until(readEnd()) { tastyName.add(readName()) }
+    while (!isAtEnd) {
+      val secName = readString()
+      val secEnd = readEnd()
+      sectionReader(secName) = new TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index)
+      goto(secEnd)
+    }
+  }
+
+  /** Run `sec` on its section if the file contains a section of that name. */
+  def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
+    for (reader <- sectionReader.get(sec.name)) yield
+      sec.unpickle(reader, tastyName)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
new file mode 100644
index 000000000..6c7982d78
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.{bestFit, dble}
+import TastyBuffer.{Addr, AddrWidth}
+import config.Printers.pickling
+import ast.untpd.Tree
+
+/** A TastyBuffer specialized for pickling trees. It tracks reserved address
+ *  fields ("refs") so that, after pickling, unused reserved bytes can be
+ *  squeezed out (`compactify`) with all addresses adjusted accordingly.
+ */
+class TreeBuffer extends TastyBuffer(50000) {
+
+  private final val ItemsOverOffsets = 2
+  private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
+  // Buffer positions of all reserved refs, and whether each is relative.
+  private var offsets = new Array[Int](initialOffsetSize)
+  private var isRelative = new Array[Boolean](initialOffsetSize)
+  // delta(i): how far bytes at offsets(i) shift left under compression.
+  private var delta: Array[Int] = _
+  private var numOffsets = 0
+
+  /** A map from trees to the address at which a tree is pickled. */
+  private val treeAddrs = new java.util.IdentityHashMap[Tree, Any] // really: Addr | Null
+
+  /** Record `currentAddr` for `tree` unless it already has an address; return the address in effect. */
+  def registerTreeAddr(tree: Tree): Addr = treeAddrs.get(tree) match {
+    case null => treeAddrs.put(tree, currentAddr); currentAddr
+    case addr: Addr => addr
+  }
+
+  def addrOfTree(tree: Tree): Option[Addr] = treeAddrs.get(tree) match {
+    case null => None
+    case addr: Addr => Some(addr)
+  }
+
+  private def offset(i: Int): Addr = Addr(offsets(i))
+
+  // Remember the current buffer position as a reserved ref, growing the arrays if needed.
+  private def keepOffset(relative: Boolean): Unit = {
+    if (numOffsets == offsets.length) {
+      offsets = dble(offsets)
+      isRelative = dble(isRelative)
+    }
+    offsets(numOffsets) = length
+    isRelative(numOffsets) = relative
+    numOffsets += 1
+  }
+
+  /** Reserve space for a reference, to be adjusted later */
+  def reserveRef(relative: Boolean): Addr = {
+    val addr = currentAddr
+    keepOffset(relative)
+    reserveAddr()
+    addr
+  }
+
+  /** Write reference right adjusted into freshly reserved field. */
+  def writeRef(target: Addr) = {
+    keepOffset(relative = false)
+    fillAddr(reserveAddr(), target)
+  }
+
+  /** Fill previously reserved field with a reference */
+  def fillRef(at: Addr, target: Addr, relative: Boolean) = {
+    val addr = if (relative) target.relativeTo(at) else target
+    fillAddr(at, addr)
+  }
+
+  /** The amount by which the bytes at the given address are shifted under compression */
+  def deltaAt(at: Addr): Int = {
+    val idx = bestFit(offsets, numOffsets, at.index - 1)
+    if (idx < 0) 0 else delta(idx)
+  }
+
+  /** The address to which `x` is translated under compression */
+  def adjusted(x: Addr): Addr = x - deltaAt(x)
+
+  /** Compute all shift-deltas */
+  private def computeDeltas() = {
+    delta = new Array[Int](numOffsets)
+    var lastDelta = 0
+    var i = 0
+    while (i < numOffsets) {
+      val off = offset(i)
+      val skippedOff = skipZeroes(off)
+      val skippedCount = skippedOff.index - off.index
+      assert(skippedCount < AddrWidth, s"unset field at position $off")
+      lastDelta += skippedCount
+      delta(i) = lastDelta
+      i += 1
+    }
+  }
+
+  /** The absolute or relative adjusted address at index `i` of `offsets` array*/
+  private def adjustedOffset(i: Int): Addr = {
+    val at = offset(i)
+    val original = getAddr(at)
+    if (isRelative(i)) {
+      val start = skipNat(at)
+      val len1 = original + delta(i) - deltaAt(original + start.index)
+      val len2 = adjusted(original + start.index) - adjusted(start).index
+      // Cross-check: the new length computed two independent ways must agree.
+      assert(len1 == len2,
+        s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2")
+      len1
+    } else adjusted(original)
+  }
+
+  /** Adjust all offsets according to previously computed deltas */
+  private def adjustOffsets(): Unit = {
+    for (i <- 0 until numOffsets) {
+      val corrected = adjustedOffset(i)
+      fillAddr(offset(i), corrected)
+    }
+  }
+
+  /** Adjust deltas to also take account references that will shrink (and thereby
+   *  generate additional zeroes that can be skipped) due to previously
+   *  computed adjustments.
+   */
+  private def adjustDeltas(): Int = {
+    val delta1 = new Array[Int](delta.length)
+    var lastDelta = 0
+    var i = 0
+    while (i < numOffsets) {
+      val corrected = adjustedOffset(i)
+      lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index)
+      delta1(i) = lastDelta
+      i += 1
+    }
+    val saved =
+      if (numOffsets == 0) 0
+      else delta1(numOffsets - 1) - delta(numOffsets - 1)
+    delta = delta1
+    saved
+  }
+
+  /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */
+  private def compress(): Int = {
+    var lastDelta = 0
+    var start = 0
+    var i = 0
+    var wasted = 0
+    def shift(end: Int) =
+      Array.copy(bytes, start, bytes, start - lastDelta, end - start)
+    while (i < numOffsets) {
+      val next = offsets(i)
+      shift(next)
+      start = next + delta(i) - lastDelta
+      val pastZeroes = skipZeroes(Addr(next)).index
+      assert(pastZeroes >= start, s"something's wrong: eliminated non-zero")
+      wasted += (pastZeroes - start)
+      lastDelta = delta(i)
+      i += 1
+    }
+    shift(length)
+    length -= lastDelta
+    wasted
+  }
+
+  /** Remap every recorded tree address to its post-compression location. */
+  def adjustTreeAddrs(): Unit = {
+    val it = treeAddrs.keySet.iterator
+    while (it.hasNext) {
+      val tree = it.next
+      treeAddrs.get(tree) match {
+        case addr: Addr => treeAddrs.put(tree, adjusted(addr))
+        // NOTE(review): this case is unchecked due to erasure; registerTreeAddr
+        // only ever stores a single Addr here — confirm whether lists can occur.
+        case addrs: List[Addr] => treeAddrs.put(tree, addrs.map(adjusted))
+      }
+    }
+  }
+
+  /** Final assembly, involving the following steps:
+   *   - compute deltas
+   *   - adjust deltas until additional savings are < 1% of total
+   *   - adjust offsets according to the adjusted deltas
+   *   - shrink buffer, skipping zeroes.
+   */
+  def compactify(): Unit = {
+    val origLength = length
+    computeDeltas()
+    //println(s"offsets: ${offsets.take(numOffsets).deep}")
+    //println(s"deltas: ${delta.take(numOffsets).deep}")
+    var saved = 0
+    do {
+      saved = adjustDeltas()
+      pickling.println(s"adjusting deltas, saved = $saved")
+    } while (saved > 0 && length / saved < 100)
+    adjustOffsets()
+    adjustTreeAddrs()
+    val wasted = compress()
+    pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now.
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
new file mode 100644
index 000000000..80270aa25
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -0,0 +1,641 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast.Trees._
+import ast.untpd
+import TastyFormat._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._
+import collection.mutable
+import typer.Inliner
+import NameOps._
+import StdNames.nme
+import TastyBuffer._
+import TypeApplications._
+
+class TreePickler(pickler: TastyPickler) {
+ val buf = new TreeBuffer
+ pickler.newSection("ASTs", buf)
+ import buf._
+ import pickler.nameBuffer.{nameIndex, fullNameIndex}
+ import ast.tpd._
+
+ private val symRefs = new mutable.HashMap[Symbol, Addr]
+ private val forwardSymRefs = new mutable.HashMap[Symbol, List[Addr]]
+ private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null
+
+  /** Run `op`, preceding its output with a length field that is back-patched
+   *  once the end address of `op`'s output is known.
+   */
+  private def withLength(op: => Unit) = {
+    val lengthAddr = reserveRef(relative = true)
+    op
+    fillRef(lengthAddr, currentAddr, relative = true)
+  }
+
+  /** The address at which `sym`'s definition was pickled, if it was pickled here. */
+  def addrOfSym(sym: Symbol): Option[Addr] = {
+    symRefs.get(sym)
+  }
+
+  /** Pre-register a member definition's symbol with NoAddr, so that later
+   *  references to it are recognized as locally defined before its body is pickled.
+   */
+  def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match {
+    case tree: MemberDef =>
+      if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr
+    case _ =>
+  }
+
+  /** Record `currentAddr` as the definition address of `sym`, and back-patch
+   *  any forward references to `sym` that were pickled before this point.
+   */
+  def registerDef(sym: Symbol): Unit = {
+    symRefs(sym) = currentAddr
+    forwardSymRefs.get(sym) match {
+      case Some(refs) =>
+        refs.foreach(fillRef(_, currentAddr, relative = false))
+        forwardSymRefs -= sym
+      case None =>
+    }
+  }
+
+  // Write a name (plain or Tasty) as its nat index into the name table.
+  private def pickleName(name: Name): Unit = writeNat(nameIndex(name).index)
+  private def pickleName(name: TastyName): Unit = writeNat(nameIndex(name).index)
+  /** Write `name` combined with `sig` as a single Signed name entry. */
+  private def pickleNameAndSig(name: Name, sig: Signature) = {
+    val Signature(params, result) = sig
+    pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result)))
+  }
+
+  /** Pickle the name of `sym`, wrapping it in SuperAccessor and/or Expanded
+   *  name entries as dictated by the symbol's flags.
+   */
+  private def pickleName(sym: Symbol)(implicit ctx: Context): Unit = {
+    def encodeSuper(name: Name): TastyName.NameRef =
+      if (sym is Flags.SuperAccessor) {
+        val SuperAccessorName(n) = name
+        nameIndex(TastyName.SuperAccessor(nameIndex(n)))
+      }
+      else nameIndex(name)
+    val nameRef =
+      if (sym is Flags.ExpandedName)
+        nameIndex(
+          TastyName.Expanded(
+            nameIndex(sym.name.expandedPrefix),
+            encodeSuper(sym.name.unexpandedName)))
+      else encodeSuper(sym.name)
+    writeNat(nameRef.index)
+  }
+
+  /** Write a reference to `sym`'s definition address, or reserve a forward
+   *  reference to be patched when the definition is pickled.
+   */
+  private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+    case Some(label) =>
+      if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
+    case None =>
+      // See pos/t1957.scala for an example where this can happen.
+      // I believe it's a bug in typer: the type of an implicit argument refers
+      // to a closure parameter outside the closure itself. TODO: track this down, so that we
+      // can eliminate this case.
+      ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
+      pickleForwardSymRef(sym)
+  }
+
+  /** Reserve an absolute ref for `sym` and remember it for back-patching in registerDef. */
+  private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = {
+    val ref = reserveRef(relative = false)
+    assert(!sym.is(Flags.Package), sym)
+    forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
+  }
+
+  /** Was `sym`'s definition already pickled in this buffer (i.e. it has a real address)? */
+  private def isLocallyDefined(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+    case Some(label) => assert(sym.exists); label != NoAddr
+    case None => false
+  }
+
+  /** Pickle constant `c` as a tag byte plus a tag-specific payload
+   *  (nothing, an int/long, a name index, or a pickled type).
+   */
+  def pickleConstant(c: Constant)(implicit ctx: Context): Unit = c.tag match {
+    case UnitTag =>
+      writeByte(UNITconst)
+    case BooleanTag =>
+      writeByte(if (c.booleanValue) TRUEconst else FALSEconst)
+    case ByteTag =>
+      writeByte(BYTEconst)
+      writeInt(c.byteValue)
+    case ShortTag =>
+      writeByte(SHORTconst)
+      writeInt(c.shortValue)
+    case CharTag =>
+      writeByte(CHARconst)
+      writeNat(c.charValue) // chars are unsigned, so a nat suffices
+    case IntTag =>
+      writeByte(INTconst)
+      writeInt(c.intValue)
+    case LongTag =>
+      writeByte(LONGconst)
+      writeLongInt(c.longValue)
+    case FloatTag =>
+      writeByte(FLOATconst)
+      writeInt(java.lang.Float.floatToRawIntBits(c.floatValue))
+    case DoubleTag =>
+      writeByte(DOUBLEconst)
+      writeLongInt(java.lang.Double.doubleToRawLongBits(c.doubleValue))
+    case StringTag =>
+      writeByte(STRINGconst)
+      writeNat(nameIndex(c.stringValue).index)
+    case NullTag =>
+      writeByte(NULLconst)
+    case ClazzTag =>
+      writeByte(CLASSconst)
+      pickleType(c.typeValue)
+    case EnumTag =>
+      writeByte(ENUMconst)
+      pickleType(c.symbolValue.termRef)
+  }
+
+  /** Pickle type `tpe0`. Already-pickled types (by reference identity, after
+   *  stripping type variables) are emitted as SHARED back-references.
+   *  @param richTypes passed through to pickleNewType; controls how nested
+   *                   component types are pickled.
+   */
+  def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = try {
+    val tpe = tpe0.stripTypeVar
+    val prev = pickledTypes.get(tpe)
+    if (prev == null) {
+      pickledTypes.put(tpe, currentAddr)
+      pickleNewType(tpe, richTypes)
+    }
+    else {
+      writeByte(SHARED)
+      writeRef(prev.asInstanceOf[Addr])
+    }
+  } catch {
+    case ex: AssertionError =>
+      println(i"error when pickling type $tpe0")
+      throw ex
+  }
+
+ private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = try { tpe match {
+ case AppliedType(tycon, args) =>
+ writeByte(APPLIEDtype)
+ withLength { pickleType(tycon); args.foreach(pickleType(_)) }
+ case ConstantType(value) =>
+ pickleConstant(value)
+ case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
+ pickleType(tpe.superType)
+ case tpe: WithFixedSym =>
+ val sym = tpe.symbol
+ def pickleRef() =
+ if (tpe.prefix == NoPrefix) {
+ writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
+ pickleSymRef(sym)
+ }
+ else {
+ assert(tpe.symbol.isClass)
+ assert(tpe.symbol.is(Flags.Scala2x), tpe.symbol.showLocated)
+ writeByte(TYPEREF) // should be changed to a new entry that keeps track of prefix, symbol & owner
+ pickleName(tpe.name)
+ pickleType(tpe.prefix)
+ }
+ if (sym.is(Flags.Package)) {
+ writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
+ pickleName(qualifiedName(sym))
+ }
+ else if (sym is Flags.BindDefinedType) {
+ registerDef(sym)
+ writeByte(BIND)
+ withLength {
+ pickleName(sym.name)
+ pickleType(sym.info)
+ pickleRef()
+ }
+ }
+ else pickleRef()
+ case tpe: TermRefWithSignature =>
+ if (tpe.symbol.is(Flags.Package)) picklePackageRef(tpe.symbol)
+ else {
+ writeByte(TERMREF)
+ pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix)
+ }
+ case tpe: NamedType =>
+ if (isLocallyDefined(tpe.symbol)) {
+ writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol)
+ pickleSymRef(tpe.symbol); pickleType(tpe.prefix)
+ } else {
+ writeByte(if (tpe.isType) TYPEREF else TERMREF)
+ pickleName(tpe.name); pickleType(tpe.prefix)
+ }
+ case tpe: ThisType =>
+ if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot)
+ picklePackageRef(tpe.cls)
+ else {
+ writeByte(THIS)
+ pickleType(tpe.tref)
+ }
+ case tpe: SuperType =>
+ writeByte(SUPERtype)
+ withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)}
+ case tpe: RecThis =>
+ writeByte(RECthis)
+ val binderAddr = pickledTypes.get(tpe.binder)
+ assert(binderAddr != null, tpe.binder)
+ writeRef(binderAddr.asInstanceOf[Addr])
+ case tpe: SkolemType =>
+ pickleType(tpe.info)
+ case tpe: RefinedType =>
+ writeByte(REFINEDtype)
+ withLength {
+ pickleName(tpe.refinedName)
+ pickleType(tpe.parent)
+ pickleType(tpe.refinedInfo, richTypes = true)
+ }
+ case tpe: RecType =>
+ writeByte(RECtype)
+ pickleType(tpe.parent)
+ case tpe: TypeAlias =>
+ writeByte(TYPEALIAS)
+ withLength {
+ pickleType(tpe.alias, richTypes)
+ tpe.variance match {
+ case 1 => writeByte(COVARIANT)
+ case -1 => writeByte(CONTRAVARIANT)
+ case 0 =>
+ }
+ }
+ case tpe: TypeBounds =>
+ writeByte(TYPEBOUNDS)
+ withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) }
+ case tpe: AnnotatedType =>
+ writeByte(ANNOTATEDtype)
+ withLength { pickleType(tpe.tpe, richTypes); pickleTree(tpe.annot.tree) }
+ case tpe: AndOrType =>
+ writeByte(if (tpe.isAnd) ANDtype else ORtype)
+ withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) }
+ case tpe: ExprType =>
+ writeByte(BYNAMEtype)
+ pickleType(tpe.underlying)
+ case tpe: PolyType =>
+ writeByte(POLYtype)
+ val paramNames = tpe.typeParams.map(tparam =>
+ varianceToPrefix(tparam.paramVariance) +: tparam.paramName)
+ pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds)
+ case tpe: MethodType if richTypes =>
+ writeByte(METHODtype)
+ pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes)
+ case tpe: PolyParam =>
+ if (!pickleParamType(tpe))
+ // TODO figure out why this case arises in e.g. pickling AbstractFileReader.
+ ctx.typerState.constraint.entry(tpe) match {
+ case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes)
+ case _ => assert(false, s"orphan poly parameter: $tpe")
+ }
+ case tpe: MethodParam =>
+ assert(pickleParamType(tpe), s"orphan method parameter: $tpe")
+ case tpe: LazyRef =>
+ pickleType(tpe.ref)
+ }} catch {
+ case ex: AssertionError =>
+ println(i"error while pickling type $tpe")
+ throw ex
+ }
+
+ def picklePackageRef(pkg: Symbol)(implicit ctx: Context): Unit = {
+ writeByte(TERMREFpkg)
+ pickleName(qualifiedName(pkg))
+ }
+
+ def pickleMethodic(result: Type, names: List[Name], types: List[Type])(implicit ctx: Context) =
+ withLength {
+ pickleType(result, richTypes = true)
+ (names, types).zipped.foreach { (name, tpe) =>
+ pickleName(name); pickleType(tpe)
+ }
+ }
+
+ def pickleParamType(tpe: ParamType)(implicit ctx: Context): Boolean = {
+ val binder = pickledTypes.get(tpe.binder)
+ val pickled = binder != null
+ if (pickled) {
+ writeByte(PARAMtype)
+ withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) }
+ }
+ pickled
+ }
+
+ def pickleTpt(tpt: Tree)(implicit ctx: Context): Unit =
+ pickleTree(tpt)
+
+ def pickleTreeUnlessEmpty(tree: Tree)(implicit ctx: Context): Unit =
+ if (!tree.isEmpty) pickleTree(tree)
+
+ def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(implicit ctx: Context) = {
+ assert(symRefs(sym) == NoAddr, sym)
+ registerDef(sym)
+ writeByte(tag)
+ withLength {
+ pickleName(sym)
+ pickleParams
+ tpt match {
+ case templ: Template => pickleTree(tpt)
+ case _ if tpt.isType => pickleTpt(tpt)
+ }
+ pickleTreeUnlessEmpty(rhs)
+ pickleModifiers(sym)
+ }
+ }
+
+ def pickleParam(tree: Tree)(implicit ctx: Context): Unit = {
+ registerTreeAddr(tree)
+ tree match {
+ case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt)
+ case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs)
+ case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
+ }
+ }
+
+ def pickleParams(trees: List[Tree])(implicit ctx: Context): Unit = {
+ trees.foreach(preRegister)
+ trees.foreach(pickleParam)
+ }
+
+ def pickleStats(stats: List[Tree])(implicit ctx: Context) = {
+ stats.foreach(preRegister)
+ stats.foreach(stat => if (!stat.isEmpty) pickleTree(stat))
+ }
+
+ def pickleTree(tree: Tree)(implicit ctx: Context): Unit = {
+ val addr = registerTreeAddr(tree)
+ if (addr != currentAddr) {
+ writeByte(SHARED)
+ writeRef(addr)
+ }
+ else
+ try tree match {
+ case Ident(name) =>
+ tree.tpe match {
+ case tp: TermRef if name != nme.WILDCARD =>
+ // wildcards are pattern bound, need to be preserved as ids.
+ pickleType(tp)
+ case _ =>
+ writeByte(if (tree.isType) IDENTtpt else IDENT)
+ pickleName(name)
+ pickleType(tree.tpe)
+ }
+ case This(qual) =>
+ if (qual.isEmpty) pickleType(tree.tpe)
+ else {
+ writeByte(QUALTHIS)
+ val ThisType(tref) = tree.tpe
+ pickleTree(qual.withType(tref))
+ }
+ case Select(qual, name) =>
+ writeByte(if (name.isTypeName) SELECTtpt else SELECT)
+ val realName = tree.tpe match {
+ case tp: NamedType if tp.name.isShadowedName => tp.name
+ case _ => name
+ }
+ val sig = tree.tpe.signature
+ if (sig == Signature.NotAMethod) pickleName(realName)
+ else pickleNameAndSig(realName, sig)
+ pickleTree(qual)
+ case Apply(fun, args) =>
+ writeByte(APPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTree)
+ }
+ case TypeApply(fun, args) =>
+ writeByte(TYPEAPPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTpt)
+ }
+ case Literal(const1) =>
+ pickleConstant {
+ tree.tpe match {
+ case ConstantType(const2) => const2
+ case _ => const1
+ }
+ }
+ case Super(qual, mix) =>
+ writeByte(SUPER)
+ withLength {
+ pickleTree(qual);
+ if (!mix.isEmpty) {
+ val SuperType(_, mixinType: TypeRef) = tree.tpe
+ pickleTree(mix.withType(mixinType))
+ }
+ }
+ case New(tpt) =>
+ writeByte(NEW)
+ pickleTpt(tpt)
+ case Typed(expr, tpt) =>
+ writeByte(TYPED)
+ withLength { pickleTree(expr); pickleTpt(tpt) }
+ case NamedArg(name, arg) =>
+ writeByte(NAMEDARG)
+ withLength { pickleName(name); pickleTree(arg) }
+ case Assign(lhs, rhs) =>
+ writeByte(ASSIGN)
+ withLength { pickleTree(lhs); pickleTree(rhs) }
+ case Block(stats, expr) =>
+ writeByte(BLOCK)
+ stats.foreach(preRegister)
+ withLength { pickleTree(expr); stats.foreach(pickleTree) }
+ case If(cond, thenp, elsep) =>
+ writeByte(IF)
+ withLength { pickleTree(cond); pickleTree(thenp); pickleTree(elsep) }
+ case Closure(env, meth, tpt) =>
+ writeByte(LAMBDA)
+ assert(env.isEmpty)
+ withLength {
+ pickleTree(meth)
+ if (tpt.tpe.exists) pickleTpt(tpt)
+ }
+ case Match(selector, cases) =>
+ writeByte(MATCH)
+ withLength { pickleTree(selector); cases.foreach(pickleTree) }
+ case CaseDef(pat, guard, rhs) =>
+ writeByte(CASEDEF)
+ withLength { pickleTree(pat); pickleTree(rhs); pickleTreeUnlessEmpty(guard) }
+ case Return(expr, from) =>
+ writeByte(RETURN)
+ withLength { pickleSymRef(from.symbol); pickleTreeUnlessEmpty(expr) }
+ case Try(block, cases, finalizer) =>
+ writeByte(TRY)
+ withLength { pickleTree(block); cases.foreach(pickleTree); pickleTreeUnlessEmpty(finalizer) }
+ case SeqLiteral(elems, elemtpt) =>
+ writeByte(REPEATED)
+ withLength { pickleTree(elemtpt); elems.foreach(pickleTree) }
+ case Inlined(call, bindings, expansion) =>
+ writeByte(INLINED)
+ bindings.foreach(preRegister)
+ withLength { pickleTree(call); pickleTree(expansion); bindings.foreach(pickleTree) }
+ case Bind(name, body) =>
+ registerDef(tree.symbol)
+ writeByte(BIND)
+ withLength { pickleName(name); pickleType(tree.symbol.info); pickleTree(body) }
+ case Alternative(alts) =>
+ writeByte(ALTERNATIVE)
+ withLength { alts.foreach(pickleTree) }
+ case UnApply(fun, implicits, patterns) =>
+ writeByte(UNAPPLY)
+ withLength {
+ pickleTree(fun)
+ for (implicitArg <- implicits) {
+ writeByte(IMPLICITarg)
+ pickleTree(implicitArg)
+ }
+ pickleType(tree.tpe)
+ patterns.foreach(pickleTree)
+ }
+ case tree: ValDef =>
+ pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs)
+ case tree: DefDef =>
+ def pickleAllParams = {
+ pickleParams(tree.tparams)
+ for (vparams <- tree.vparamss) {
+ writeByte(PARAMS)
+ withLength { pickleParams(vparams) }
+ }
+ }
+ pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams)
+ case tree: TypeDef =>
+ pickleDef(TYPEDEF, tree.symbol, tree.rhs)
+ case tree: Template =>
+ registerDef(tree.symbol)
+ writeByte(TEMPLATE)
+ val (params, rest) = tree.body partition {
+ case stat: TypeDef => stat.symbol is Flags.Param
+ case stat: ValOrDefDef =>
+ stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter
+ case _ => false
+ }
+ withLength {
+ pickleParams(params)
+ tree.parents.foreach(pickleTree)
+ val cinfo @ ClassInfo(_, _, _, _, selfInfo) = tree.symbol.owner.info
+ if ((selfInfo ne NoType) || !tree.self.isEmpty) {
+ writeByte(SELFDEF)
+ pickleName(tree.self.name)
+
+ if (!tree.self.tpt.isEmpty) pickleTree(tree.self.tpt)
+ else {
+ if (!tree.self.isEmpty) registerTreeAddr(tree.self)
+ pickleType {
+ cinfo.selfInfo match {
+ case sym: Symbol => sym.info
+ case tp: Type => tp
+ }
+ }
+ }
+ }
+ pickleStats(tree.constr :: rest)
+ }
+ case Import(expr, selectors) =>
+ writeByte(IMPORT)
+ withLength {
+ pickleTree(expr)
+ selectors foreach {
+ case Thicket((from @ Ident(_)) :: (to @ Ident(_)) :: Nil) =>
+ pickleSelector(IMPORTED, from)
+ pickleSelector(RENAMED, to)
+ case id @ Ident(_) =>
+ pickleSelector(IMPORTED, id)
+ }
+ }
+ case PackageDef(pid, stats) =>
+ writeByte(PACKAGE)
+ withLength { pickleType(pid.tpe); pickleStats(stats) }
+ case tree: TypeTree =>
+ pickleType(tree.tpe)
+ case SingletonTypeTree(ref) =>
+ writeByte(SINGLETONtpt)
+ pickleTree(ref)
+ case RefinedTypeTree(parent, refinements) =>
+ if (refinements.isEmpty) pickleTree(parent)
+ else {
+ val refineCls = refinements.head.symbol.owner.asClass
+ pickledTypes.put(refineCls.typeRef, currentAddr)
+ writeByte(REFINEDtpt)
+ refinements.foreach(preRegister)
+ withLength { pickleTree(parent); refinements.foreach(pickleTree) }
+ }
+ case AppliedTypeTree(tycon, args) =>
+ writeByte(APPLIEDtpt)
+ withLength { pickleTree(tycon); args.foreach(pickleTree) }
+ case AndTypeTree(tp1, tp2) =>
+ writeByte(ANDtpt)
+ withLength { pickleTree(tp1); pickleTree(tp2) }
+ case OrTypeTree(tp1, tp2) =>
+ writeByte(ORtpt)
+ withLength { pickleTree(tp1); pickleTree(tp2) }
+ case ByNameTypeTree(tp) =>
+ writeByte(BYNAMEtpt)
+ pickleTree(tp)
+ case Annotated(tree, annot) =>
+ writeByte(ANNOTATEDtpt)
+ withLength { pickleTree(tree); pickleTree(annot.tree) }
+ case PolyTypeTree(tparams, body) =>
+ writeByte(POLYtpt)
+ withLength { pickleParams(tparams); pickleTree(body) }
+ case TypeBoundsTree(lo, hi) =>
+ writeByte(TYPEBOUNDStpt)
+ withLength { pickleTree(lo); pickleTree(hi) }
+ }
+ catch {
+ case ex: AssertionError =>
+ println(i"error when pickling tree $tree")
+ throw ex
+ }
+ }
+
+ def pickleSelector(tag: Int, id: untpd.Ident)(implicit ctx: Context): Unit = {
+ registerTreeAddr(id)
+ writeByte(tag)
+ pickleName(id.name)
+ }
+
+ def qualifiedName(sym: Symbol)(implicit ctx: Context): TastyName =
+ if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName)
+ else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name))
+
+ def pickleModifiers(sym: Symbol)(implicit ctx: Context): Unit = {
+ import Flags._
+ val flags = sym.flags
+ val privateWithin = sym.privateWithin
+ if (privateWithin.exists) {
+ writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified)
+ pickleType(privateWithin.typeRef)
+ }
+ if (flags is Private) writeByte(PRIVATE)
+ if (flags is Protected) if (!privateWithin.exists) writeByte(PROTECTED)
+ if ((flags is Final) && !(sym is Module)) writeByte(FINAL)
+ if (flags is Case) writeByte(CASE)
+ if (flags is Override) writeByte(OVERRIDE)
+ if (flags is Inline) writeByte(INLINE)
+ if (flags is JavaStatic) writeByte(STATIC)
+ if (flags is Module) writeByte(OBJECT)
+ if (flags is Local) writeByte(LOCAL)
+ if (flags is Synthetic) writeByte(SYNTHETIC)
+ if (flags is Artifact) writeByte(ARTIFACT)
+ if (flags is Scala2x) writeByte(SCALA2X)
+ if (flags is InSuperCall) writeByte(INSUPERCALL)
+ if (sym.isTerm) {
+ if (flags is Implicit) writeByte(IMPLICIT)
+ if ((flags is Lazy) && !(sym is Module)) writeByte(LAZY)
+ if (flags is AbsOverride) { writeByte(ABSTRACT); writeByte(OVERRIDE) }
+ if (flags is Mutable) writeByte(MUTABLE)
+ if (flags is Accessor) writeByte(FIELDaccessor)
+ if (flags is CaseAccessor) writeByte(CASEaccessor)
+ if (flags is DefaultParameterized) writeByte(DEFAULTparameterized)
+ if (flags is Stable) writeByte(STABLE)
+ } else {
+ if (flags is Sealed) writeByte(SEALED)
+ if (flags is Abstract) writeByte(ABSTRACT)
+ if (flags is Trait) writeByte(TRAIT)
+ if (flags is Covariant) writeByte(COVARIANT)
+ if (flags is Contravariant) writeByte(CONTRAVARIANT)
+ }
+ sym.annotations.foreach(pickleAnnotation)
+ }
+
+ def pickleAnnotation(ann: Annotation)(implicit ctx: Context) =
+ if (ann.symbol != defn.BodyAnnot) { // inline bodies are reconstituted automatically when unpickling
+ writeByte(ANNOTATION)
+ withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
+ }
+
+ def pickle(trees: List[Tree])(implicit ctx: Context) = {
+ trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree))
+ assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, % when pickling ${ctx.source}")
+ }
+
+ def compactify() = {
+ buf.compactify()
+
+ def updateMapWithDeltas[T](mp: collection.mutable.Map[T, Addr]) =
+ for (key <- mp.keysIterator.toBuffer[T]) mp(key) = adjusted(mp(key))
+
+ updateMapWithDeltas(symRefs)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
new file mode 100644
index 000000000..eba9ab533
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -0,0 +1,1161 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
+import StdNames._, Denotations._, Flags._, Constants._, Annotations._
+import util.Positions._
+import ast.{tpd, Trees, untpd}
+import Trees._
+import Decorators._
+import TastyUnpickler._, TastyBuffer._
+import scala.annotation.{tailrec, switch}
+import scala.collection.mutable.ListBuffer
+import scala.collection.{ mutable, immutable }
+import config.Printers.pickling
+
+/** Unpickler for typed trees
+ * @param reader the reader from which to unpickle
+ * @param tastyName the nametable
+ * @param posUNpicklerOpt the unpickler for positions, if it exists
+ */
+class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table, posUnpicklerOpt: Option[PositionUnpickler]) {
+ import TastyFormat._
+ import TastyName._
+ import TreeUnpickler._
+ import tpd._
+
+ /** A map from addresses of definition entries to the symbols they define */
+ private val symAtAddr = new mutable.HashMap[Addr, Symbol]
+
+ /** A temporary map from addresses of definition entries to the trees they define.
+ * Used to remember trees of symbols that are created by a completion. Emptied
+ * once the tree is inlined into a larger tree.
+ */
+ private val treeAtAddr = new mutable.HashMap[Addr, Tree]
+
+ /** A map from addresses of type entries to the types they define.
+ * Currently only populated for types that might be recursively referenced
+ * from within themselves (i.e. RefinedTypes, PolyTypes, MethodTypes).
+ */
+ private val typeAtAddr = new mutable.HashMap[Addr, Type]
+
+ /** The root symbol denotation which are defined by the Tasty file associated with this
+ * TreeUnpickler. Set by `enterTopLevel`.
+ */
+ private var roots: Set[SymDenotation] = null
+
+ /** The root symbols that are defined in this Tasty file. This
+ * is a subset of `roots.map(_.symbol)`.
+ */
+ private var seenRoots: Set[Symbol] = Set()
+
+ /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */
+ private var ownerTree: OwnerTree = _
+
+ private def registerSym(addr: Addr, sym: Symbol) =
+ symAtAddr(addr) = sym
+
+ /** Enter all toplevel classes and objects into their scopes
+ * @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = {
+ this.roots = roots
+ var rdr = new TreeReader(reader).fork
+ ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr)
+ rdr.indexStats(reader.endAddr)
+ }
+
+ /** The unpickled trees */
+ def unpickle()(implicit ctx: Context): List[Tree] = {
+ assert(roots != null, "unpickle without previous enterTopLevel")
+ new TreeReader(reader).readTopLevel()(ctx.addMode(Mode.AllowDependentFunctions))
+ }
+
+ def toTermName(tname: TastyName): TermName = tname match {
+ case Simple(name) => name
+ case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
+ case Signed(original, params, result) => toTermName(original)
+ case Shadowed(original) => toTermName(original).shadowedName
+ case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
+ case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
+ case SuperAccessor(accessed) => toTermName(accessed).superName
+ case DefaultGetter(meth, num) => ???
+ }
+
+ def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref))
+ def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName
+
+ class Completer(owner: Symbol, reader: TastyReader) extends LazyType {
+ import reader._
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ treeAtAddr(currentAddr) =
+ new TreeReader(reader).readIndexedDef()(
+ ctx.withPhaseNoLater(ctx.picklerPhase).withOwner(owner))
+ }
+ }
+
+ class TreeReader(val reader: TastyReader) {
+ import reader._
+
+ def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
+ def fork = forkAt(currentAddr)
+
+ def skipTree(tag: Int): Unit =
+ if (tag >= firstLengthTreeTag) goto(readEnd())
+ else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
+ else if (tag >= firstASTTreeTag) skipTree()
+ else if (tag >= firstNatTreeTag) readNat()
+ def skipTree(): Unit = skipTree(readByte())
+
+ def skipParams(): Unit =
+ while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
+
+ /** Record all directly nested definitions and templates in current tree
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTree(buf: ListBuffer[OwnerTree], mode: MemberDefMode = AllDefs): Unit = {
+ val start = currentAddr
+ val tag = readByte()
+ tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | TEMPLATE =>
+ val end = readEnd()
+ for (i <- 0 until numRefs(tag)) readNat()
+ if (tag == TEMPLATE) scanTrees(buf, end, MemberDefsOnly)
+ if (mode != NoMemberDefs) buf += new OwnerTree(start, tag, fork, end)
+ goto(end)
+ case tag =>
+ if (mode == MemberDefsOnly) skipTree(tag)
+ else if (tag >= firstLengthTreeTag) {
+ val end = readEnd()
+ var nrefs = numRefs(tag)
+ if (nrefs < 0) {
+ for (i <- nrefs until 0) scanTree(buf)
+ goto(end)
+ }
+ else {
+ for (i <- 0 until nrefs) readNat()
+ scanTrees(buf, end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) { readNat(); scanTree(buf) }
+ else if (tag >= firstASTTreeTag) scanTree(buf)
+ else if (tag >= firstNatTreeTag) readNat()
+ }
+ }
+
+ /** Record all directly nested definitions and templates between current address and `end`
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTrees(buf: ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode = AllDefs): Unit = {
+ while (currentAddr.index < end.index) scanTree(buf, mode)
+ assert(currentAddr.index == end.index)
+ }
+
+ /** The next tag, following through SHARED tags */
+ def nextUnsharedTag: Int = {
+ val tag = nextByte
+ if (tag == SHARED) {
+ val lookAhead = fork
+ lookAhead.reader.readByte()
+ forkAt(lookAhead.reader.readAddr()).nextUnsharedTag
+ }
+ else tag
+ }
+
+ def readName(): TermName = toTermName(readNameRef())
+
+ def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
+ tastyName(readNameRef()) match {
+ case Signed(original, params, result) =>
+ var sig = Signature(params map toTypeName, toTypeName(result))
+ if (sig == Signature.NotAMethod) sig = Signature.NotAMethod
+ (toTermName(original), sig)
+ case name =>
+ toTermName(name)
+ }
+
+// ------ Reading types -----------------------------------------------------
+
+ /** Read names in an interleaved sequence of (parameter) names and types/bounds */
+ def readParamNames(end: Addr): List[Name] =
+ until(end) {
+ val name = readName()
+ skipTree()
+ name
+ }
+
+ /** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */
+ def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
+ until(end) { readNat(); readType().asInstanceOf[T] }
+
+ /** Read referece to definition and return symbol created at that definition */
+ def readSymRef()(implicit ctx: Context): Symbol = symbolAt(readAddr())
+
+ /** The symbol at given address; createa new one if none exists yet */
+ def symbolAt(addr: Addr)(implicit ctx: Context): Symbol = symAtAddr.get(addr) match {
+ case Some(sym) =>
+ sym
+ case None =>
+ val sym = forkAt(addr).createSymbol()(ctx.withOwner(ownerTree.findOwner(addr)))
+ ctx.log(i"forward reference to $sym")
+ sym
+ }
+
+ /** The symbol defined by current definition */
+ def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match {
+ case Some(sym) =>
+ assert(ctx.owner == sym.owner, i"owner discrepancy for $sym, expected: ${ctx.owner}, found: ${sym.owner}")
+ sym
+ case None =>
+ createSymbol()
+ }
+
+ /** Read a type */
+ def readType()(implicit ctx: Context): Type = {
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading type ${astTagToString(tag)} at $start")
+
+ def registeringType[T](tp: Type, op: => T): T = {
+ typeAtAddr(start) = tp
+ op
+ }
+
+ def readLengthType(): Type = {
+ val end = readEnd()
+
+ def readNamesSkipParams: (List[Name], TreeReader) = {
+ val nameReader = fork
+ nameReader.skipTree() // skip result
+ val paramReader = nameReader.fork
+ (nameReader.readParamNames(end), paramReader)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPERtype =>
+ SuperType(readType(), readType())
+ case REFINEDtype =>
+ var name: Name = readName()
+ val parent = readType()
+ val ttag = nextUnsharedTag
+ if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
+ RefinedType(parent, name, readType())
+ // Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
+ // Eta expansion of the latter puts readType() out of the expression.
+ case APPLIEDtype =>
+ readType().appliedTo(until(end)(readType()))
+ case TYPEBOUNDS =>
+ TypeBounds(readType(), readType())
+ case TYPEALIAS =>
+ val alias = readType()
+ val variance =
+ if (nextByte == COVARIANT) { readByte(); 1 }
+ else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
+ else 0
+ TypeAlias(alias, variance)
+ case ANNOTATEDtype =>
+ AnnotatedType(readType(), Annotation(readTerm()))
+ case ANDtype =>
+ AndType(readType(), readType())
+ case ORtype =>
+ OrType(readType(), readType())
+ case BIND =>
+ val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
+ registerSym(start, sym)
+ TypeRef.withFixedSym(NoPrefix, sym.name, sym)
+ case POLYtype =>
+ val (rawNames, paramReader) = readNamesSkipParams
+ val (variances, paramNames) = rawNames
+ .map(name => (prefixToVariance(name.head), name.tail.toTypeName)).unzip
+ val result = PolyType(paramNames, variances)(
+ pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
+ pt => readType())
+ goto(end)
+ result
+ case METHODtype =>
+ val (names, paramReader) = readNamesSkipParams
+ val result = MethodType(names.map(_.toTermName), paramReader.readParamTypes[Type](end))(
+ mt => registeringType(mt, readType()))
+ goto(end)
+ result
+ case PARAMtype =>
+ readTypeRef() match {
+ case binder: PolyType => PolyParam(binder, readNat())
+ case binder: MethodType => MethodParam(binder, readNat())
+ }
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case ENUMconst =>
+ ConstantType(Constant(readTermRef().termSymbol))
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ def readSimpleType(): Type = (tag: @switch) match {
+ case TYPEREFdirect | TERMREFdirect =>
+ NamedType.withFixedSym(NoPrefix, readSymRef())
+ case TYPEREFsymbol | TERMREFsymbol =>
+ readSymNameRef()
+ case TYPEREFpkg =>
+ readPackageRef().moduleClass.typeRef
+ case TERMREFpkg =>
+ readPackageRef().termRef
+ case TYPEREF =>
+ val name = readName().toTypeName
+ TypeRef(readType(), name)
+ case TERMREF =>
+ readNameSplitSig() match {
+ case name: TermName => TermRef.all(readType(), name)
+ case (name: TermName, sig: Signature) => TermRef.withSig(readType(), name, sig)
+ }
+ case THIS =>
+ ThisType.raw(readType().asInstanceOf[TypeRef])
+ case RECtype =>
+ RecType(rt => registeringType(rt, readType()))
+ case RECthis =>
+ RecThis(readTypeRef().asInstanceOf[RecType])
+ case SHARED =>
+ val ref = readAddr()
+ typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
+ case UNITconst =>
+ ConstantType(Constant(()))
+ case TRUEconst =>
+ ConstantType(Constant(true))
+ case FALSEconst =>
+ ConstantType(Constant(false))
+ case BYTEconst =>
+ ConstantType(Constant(readInt().toByte))
+ case SHORTconst =>
+ ConstantType(Constant(readInt().toShort))
+ case CHARconst =>
+ ConstantType(Constant(readNat().toChar))
+ case INTconst =>
+ ConstantType(Constant(readInt()))
+ case LONGconst =>
+ ConstantType(Constant(readLongInt()))
+ case FLOATconst =>
+ ConstantType(Constant(java.lang.Float.intBitsToFloat(readInt())))
+ case DOUBLEconst =>
+ ConstantType(Constant(java.lang.Double.longBitsToDouble(readLongInt())))
+ case STRINGconst =>
+ ConstantType(Constant(readName().toString))
+ case NULLconst =>
+ ConstantType(Constant(null))
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case BYNAMEtype =>
+ ExprType(readType())
+ }
+
+ if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
+ }
+
+ private def readSymNameRef()(implicit ctx: Context): Type = {
+ val sym = readSymRef()
+ val prefix = readType()
+ val res = NamedType.withSymAndName(prefix, sym, sym.name)
+ prefix match {
+ case prefix: ThisType if prefix.cls eq sym.owner => res.withDenot(sym.denot)
+ // without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
+ // the problem arises when a self type of a trait is a type parameter of the same trait.
+ case _ => res
+ }
+ }
+
+ private def readPackageRef()(implicit ctx: Context): TermSymbol = {
+ val name = readName()
+ if (name == nme.ROOT || name == nme.ROOTPKG) defn.RootPackage
+ else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
+ else ctx.requiredPackage(name)
+ }
+
+ def readTypeRef(): Type =
+ typeAtAddr(readAddr())
+
+ def readTermRef()(implicit ctx: Context): TermRef =
+ readType().asInstanceOf[TermRef]
+
+// ------ Reading definitions -----------------------------------------------------
+
+ private def noRhs(end: Addr): Boolean =
+ currentAddr == end || isModifierTag(nextByte)
+
+ private def localContext(owner: Symbol)(implicit ctx: Context) = {
+ val lctx = ctx.fresh.setOwner(owner)
+ if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
+ }
+
+ private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbsType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
+ val lacksDefinition =
+ rhsIsEmpty &&
+ name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
+ isAbsType
+ var flags = givenFlags
+ if (lacksDefinition && tag != PARAM) flags |= Deferred
+ if (tag == DEFDEF) flags |= Method
+ if (givenFlags is Module)
+ flags = flags | (if (tag == VALDEF) ModuleCreationFlags else ModuleClassCreationFlags)
+ if (ctx.owner.isClass) {
+ if (tag == TYPEPARAM) flags |= Param
+ else if (tag == PARAM) flags |= ParamAccessor
+ }
+ else if (isParamTag(tag)) flags |= Param
+ flags
+ }
+
+ def isAbstractType(ttag: Int)(implicit ctx: Context): Boolean = nextUnsharedTag match {
+ case POLYtpt =>
+ val rdr = fork
+ rdr.reader.readByte() // tag
+ rdr.reader.readNat() // length
+ rdr.skipParams() // tparams
+ rdr.isAbstractType(rdr.nextUnsharedTag)
+ case TYPEBOUNDS | TYPEBOUNDStpt => true
+ case _ => false
+ }
+
+ /** Create symbol of definition node and enter in symAtAddr map
+ * @return the created symbol
+ */
+ def createSymbol()(implicit ctx: Context): Symbol = nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createMemberSymbol()
+ case TEMPLATE =>
+ val localDummy = ctx.newLocalDummy(ctx.owner)
+ registerSym(currentAddr, localDummy)
+ localDummy
+ case tag =>
+ throw new Error(s"illegal createSymbol at $currentAddr, tag = $tag")
+ }
+
+ /** Create symbol of member definition or parameter node and enter in symAtAddr map
+ * @return the created symbol
+ */
+ def createMemberSymbol()(implicit ctx: Context): Symbol = {
+ val start = currentAddr
+ val tag = readByte()
+ val end = readEnd()
+ val rawName = tastyName(readNameRef())
+ var name: Name = toTermName(rawName)
+ if (tag == TYPEDEF || tag == TYPEPARAM) name = name.toTypeName
+ skipParams()
+ val ttag = nextUnsharedTag
+ val isAbsType = isAbstractType(ttag)
+ val isClass = ttag == TEMPLATE
+ val templateStart = currentAddr
+ skipTree() // tpt
+ val rhsStart = currentAddr
+ val rhsIsEmpty = noRhs(end)
+ if (!rhsIsEmpty) skipTree()
+ val (givenFlags, annots, privateWithin) = readModifiers(end)
+ def nameFlags(tname: TastyName): FlagSet = tname match {
+ case TastyName.Expanded(_, original) => ExpandedName | nameFlags(tastyName(original))
+ case TastyName.SuperAccessor(_) => Flags.SuperAccessor
+ case _ => EmptyFlags
+ }
+ pickling.println(i"creating symbol $name at $start with flags $givenFlags")
+ val flags = normalizeFlags(tag, givenFlags | nameFlags(rawName), name, isAbsType, rhsIsEmpty)
+ def adjustIfModule(completer: LazyType) =
+ if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
+ val sym =
+ roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
+ case Some(rootd) =>
+ pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}")
+ rootd.info = adjustIfModule(
+ new Completer(ctx.owner, subReader(start, end)) with SymbolLoaders.SecondCompleter)
+ rootd.flags = flags &~ Touched // allow one more completion
+ rootd.privateWithin = privateWithin
+ seenRoots += rootd.symbol
+ rootd.symbol
+ case _ =>
+ val completer = adjustIfModule(new Completer(ctx.owner, subReader(start, end)))
+ if (isClass)
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord = start.index)
+ else
+ ctx.newSymbol(ctx.owner, name, flags, completer, privateWithin, coord = start.index)
+ } // TODO set position somehow (but take care not to upset Symbol#isDefinedInCurrentRun)
+ sym.annotations = annots
+ ctx.enter(sym)
+ registerSym(start, sym)
+ if (isClass) {
+ sym.completer.withDecls(newScope)
+ forkAt(templateStart).indexTemplateParams()(localContext(sym))
+ }
+ else if (sym.isInlineMethod)
+ sym.addAnnotation(LazyBodyAnnotation { ctx0 =>
+ implicit val ctx: Context = localContext(sym)(ctx0).addMode(Mode.ReadPositions)
+ // avoids space leaks by not capturing the current context
+ forkAt(rhsStart).readTerm()
+ })
+ goto(start)
+ sym
+ }
+
+ /** Read modifier list into triplet of flags, annotations and a privateWithin
+ * boundary symbol.
+ * @param end the address just past the last modifier tag
+ */
+ def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
+ var flags: FlagSet = EmptyFlags
+ var annots = new mutable.ListBuffer[Annotation]
+ var privateWithin: Symbol = NoSymbol
+ // Each loop iteration consumes exactly one modifier tag (plus any payload)
+ // until the reader reaches `end`.
+ while (currentAddr.index != end.index) {
+ // Record `flag` and consume the single tag byte just inspected.
+ def addFlag(flag: FlagSet) = {
+ flags |= flag
+ readByte()
+ }
+ nextByte match {
+ case PRIVATE => addFlag(Private)
+ case INTERNAL => ??? // addFlag(Internal) -- INTERNAL is not yet supported; hitting it fails hard
+ case PROTECTED => addFlag(Protected)
+ case ABSTRACT =>
+ // ABSTRACT OVERRIDE is pickled as two consecutive tags.
+ readByte()
+ nextByte match {
+ case OVERRIDE => addFlag(AbsOverride)
+ case _ => flags |= Abstract
+ }
+ case FINAL => addFlag(Final)
+ case SEALED => addFlag(Sealed)
+ case CASE => addFlag(Case)
+ case IMPLICIT => addFlag(Implicit)
+ case LAZY => addFlag(Lazy)
+ case OVERRIDE => addFlag(Override)
+ case INLINE => addFlag(Inline)
+ case STATIC => addFlag(JavaStatic)
+ case OBJECT => addFlag(Module)
+ case TRAIT => addFlag(Trait)
+ case LOCAL => addFlag(Local)
+ case SYNTHETIC => addFlag(Synthetic)
+ case ARTIFACT => addFlag(Artifact)
+ case MUTABLE => addFlag(Mutable)
+ case LABEL => addFlag(Label)
+ case FIELDaccessor => addFlag(Accessor)
+ case CASEaccessor => addFlag(CaseAccessor)
+ case COVARIANT => addFlag(Covariant)
+ case CONTRAVARIANT => addFlag(Contravariant)
+ case SCALA2X => addFlag(Scala2x)
+ case DEFAULTparameterized => addFlag(DefaultParameterized)
+ case INSUPERCALL => addFlag(InSuperCall)
+ case STABLE => addFlag(Stable)
+ // Qualified visibility: the access boundary follows as a type reference.
+ case PRIVATEqualified =>
+ readByte()
+ privateWithin = readType().typeSymbol
+ case PROTECTEDqualified =>
+ addFlag(Protected)
+ privateWithin = readType().typeSymbol
+ case ANNOTATION =>
+ readByte()
+ val end = readEnd()
+ val sym = readType().typeSymbol
+ // Annotation arguments are read lazily (deferred) to avoid forcing cycles.
+ val lazyAnnotTree = readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+ annots += Annotation.deferred(sym, _ => lazyAnnotTree.complete)
+ case _ =>
+ assert(false, s"illegal modifier tag at $currentAddr")
+ }
+ }
+ (flags, annots.toList, privateWithin)
+ }
+
+ /** Create symbols for the definitions in the statement sequence between
+ * current address and `end`.
+ * @return the largest subset of {NoInits, PureInterface} that a
+ * trait owning the indexed statements can have as flags.
+ */
+ def indexStats(end: Addr)(implicit ctx: Context): FlagSet = {
+ var initsFlags = NoInitsInterface
+ while (currentAddr.index < end.index) {
+ nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ val sym = symbolAtCurrent()
+ skipTree()
+ // A concrete term that is not a method/lazy/deferred needs a field
+ // initializer, so the owner can be neither NoInits nor a pure interface.
+ if (sym.isTerm && !sym.is(MethodOrLazyOrDeferred))
+ initsFlags = EmptyFlags
+ // A nested class or concrete non-constructor method keeps NoInits
+ // but disqualifies the owner from being a pure interface.
+ else if (sym.isClass ||
+ sym.is(Method, butNot = Deferred) && !sym.isConstructor)
+ initsFlags &= NoInits
+ case IMPORT =>
+ skipTree()
+ case PACKAGE =>
+ processPackage { (pid, end) => implicit ctx => indexStats(end) }
+ case _ =>
+ // Any other statement is initialization code.
+ skipTree()
+ initsFlags = EmptyFlags
+ }
+ }
+ assert(currentAddr.index == end.index)
+ initsFlags
+ }
+
+ /** Process package with given operation `op`. The operation takes as arguments
+ * - a `RefTree` representing the `pid` of the package,
+ * - an end address,
+ * - a context which has the processed package as owner
+ */
+ def processPackage[T](op: (RefTree, Addr) => Context => T)(implicit ctx: Context): T = {
+ readByte() // consume the PACKAGE tag
+ val end = readEnd()
+ val pid = ref(readTermRef()).asInstanceOf[RefTree]
+ // Run `op` with the package's module class as owner.
+ op(pid, end)(localContext(pid.symbol.moduleClass))
+ }
+
+ /** Create symbols for the longest consecutive sequence of parameters with given
+ * `tag` starting at current address.
+ */
+ def indexParams(tag: Int)(implicit ctx: Context) =
+ while (nextByte == tag) {
+ symbolAtCurrent()
+ skipTree()
+ }
+
+ /** Create symbols for all type and value parameters of template starting
+ * at current address. Type parameters are pickled before value parameters.
+ */
+ def indexTemplateParams()(implicit ctx: Context) = {
+ assert(readByte() == TEMPLATE)
+ readEnd() // skip the template's end address; callers re-read it as needed
+ indexParams(TYPEPARAM)
+ indexParams(PARAM)
+ }
+
+ /** If definition was already read by a completer, return the previously read tree
+ * or else read definition.
+ * A cached tree is removed from `treeAtAddr` and the reader skips past it,
+ * so either branch leaves the reader positioned after the definition.
+ */
+ def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
+ case Some(tree) => skipTree(); tree
+ case none => readNewDef()
+ }
+
+ /** Read a member definition (DEFDEF, VALDEF, TYPEDEF/TYPEPARAM or PARAM)
+ * starting at the current address, completing the info of the symbol that
+ * was pre-created for it during indexing. The reader ends at `end`.
+ */
+ private def readNewDef()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ val sym = symAtAddr(start) // symbol created earlier by indexStats/indexParams
+ val tag = readByte()
+ val end = readEnd()
+
+ // Read all pickled value-parameter clauses (each introduced by PARAMS).
+ def readParamss(implicit ctx: Context): List[List[ValDef]] = {
+ collectWhile(nextByte == PARAMS) {
+ readByte()
+ readEnd()
+ readParams[ValDef](PARAM)
+ }
+ }
+
+ // Right-hand side is read lazily; absent if nothing remains before `end`.
+ def readRhs(implicit ctx: Context) =
+ if (noRhs(end)) EmptyTree
+ else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+
+ def localCtx = localContext(sym)
+
+ // The three tree builders below assign types via the type assigner `ta`.
+ def ValDef(tpt: Tree) =
+ ta.assignType(untpd.ValDef(sym.name.asTermName, tpt, readRhs(localCtx)), sym)
+
+ def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
+ ta.assignType(
+ untpd.DefDef(
+ sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
+ sym)
+
+ def TypeDef(rhs: Tree) =
+ ta.assignType(untpd.TypeDef(sym.name.asTypeName, rhs), sym)
+
+ def ta = ctx.typeAssigner
+
+ val name = readName()
+ pickling.println(s"reading def of $name at $start")
+ val tree: MemberDef = tag match {
+ case DEFDEF =>
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val vparamss = readParamss(localCtx)
+ val tpt = readTpt()
+ val typeParams = tparams.map(_.symbol)
+ val valueParamss = ctx.normalizeIfConstructor(
+ vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
+ val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe)
+ sym.info = ctx.methodType(typeParams, valueParamss, resType)
+ if (sym.isSetter && sym.accessedFieldOrGetter.is(ParamAccessor)) {
+ // reconstitute ParamAccessor flag of setters for var parameters, which is not pickled
+ sym.setFlag(ParamAccessor)
+ sym.resetFlag(Deferred)
+ }
+ DefDef(tparams, vparamss, tpt)
+ case VALDEF =>
+ val tpt = readTpt()
+ sym.info = tpt.tpe
+ ValDef(tpt)
+ case TYPEDEF | TYPEPARAM =>
+ if (sym.isClass) {
+ val companion = sym.scalacLinkedClass
+
+ // Is the companion defined in the same Tasty file as `sym`?
+ // The only case to check here is if `sym` is a root. In this case
+ // `companion` might have been entered by the environment but it might
+ // be missing from the Tasty file. So we check explicitly for that.
+ def isCodefined =
+ roots.contains(companion.denot) == seenRoots.contains(companion)
+ if (companion.exists && isCodefined) {
+ import transform.SymUtils._
+ if (sym is Flags.ModuleClass) sym.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, companion)
+ else sym.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, companion)
+ }
+ TypeDef(readTemplate(localCtx))
+ } else {
+ val rhs = readTpt()
+ // Bounds and class infos are kept as-is; anything else becomes an alias.
+ sym.info = rhs.tpe match {
+ case _: TypeBounds | _: ClassInfo => rhs.tpe
+ case _ => TypeAlias(rhs.tpe, sym.variance)
+ }
+ TypeDef(rhs)
+ }
+ case PARAM =>
+ val tpt = readTpt()
+ if (noRhs(end)) {
+ sym.info = tpt.tpe
+ ValDef(tpt)
+ }
+ else {
+ // A PARAM with a rhs is a parameter alias, represented as a method.
+ sym.setFlag(Method)
+ sym.info = ExprType(tpt.tpe)
+ pickling.println(i"reading param alias $name -> $currentAddr")
+ DefDef(Nil, Nil, tpt)
+ }
+ }
+ val mods =
+ if (sym.annotations.isEmpty) untpd.EmptyModifiers
+ else untpd.Modifiers(annotations = sym.annotations.map(_.tree))
+ tree.withMods(mods)
+ // record annotations in tree so that tree positions can be filled in.
+ // Note: Once the inline PR with its changes to positions is in, this should be
+ // no longer necessary.
+ goto(end)
+ setPos(start, tree)
+ }
+
+ /** Read the template (parents, self type, constructor, statements) of the
+ * class that owns the current context. The class info is set in two steps:
+ * first with an assumed self type so parents can be read, then with the
+ * real parent refs and self type.
+ */
+ private def readTemplate(implicit ctx: Context): Template = {
+ val start = currentAddr
+ val cls = ctx.owner.asClass
+ def setClsInfo(parents: List[TypeRef], selfType: Type) =
+ cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
+ // A module class gets its source module's term ref as provisional self type.
+ val assumedSelfType =
+ if (cls.is(Module) && cls.owner.isClass)
+ TermRef.withSig(cls.owner.thisType, cls.name.sourceModuleName, Signature.NotAMethod)
+ else NoType
+ setClsInfo(Nil, assumedSelfType)
+ val localDummy = symbolAtCurrent()
+ assert(readByte() == TEMPLATE)
+ val end = readEnd()
+ val tparams = readIndexedParams[TypeDef](TYPEPARAM)
+ val vparams = readIndexedParams[ValDef](PARAM)
+ // Parents are either constructor applications (APPLY/TYPEAPPLY) or plain types.
+ val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) {
+ nextByte match {
+ case APPLY | TYPEAPPLY => readTerm()
+ case _ => readTpt()
+ }
+ }
+ val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
+ val self =
+ if (nextByte == SELFDEF) {
+ readByte()
+ untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
+ }
+ else EmptyValDef
+ setClsInfo(parentRefs, if (self.isEmpty) NoType else self.tpt.tpe)
+ // Index body stats with a fork so this reader stays at the constructor.
+ cls.setApplicableFlags(fork.indexStats(end))
+ val constr = readIndexedDef().asInstanceOf[DefDef]
+
+ // Interleave type parameters with the type aliases generated for them
+ // (an alias is recognized by its expanded name matching the tparam).
+ def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
+ (tparams, stats) match {
+ case (tparam :: tparams1, (alias: TypeDef) :: stats1)
+ if tparam.name == alias.name.expandedName(cls) =>
+ val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
+ (tparam :: alias :: tas, stats2)
+ case _ =>
+ (tparams, stats)
+ }
+
+ // Body statements are read lazily on demand.
+ val lazyStats = readLater(end, rdr => implicit ctx => {
+ val stats0 = rdr.readIndexedStats(localDummy, end)
+ val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
+ tparamsAndAliases ++ vparams ++ stats
+ })
+ setPos(start,
+ untpd.Template(constr, parents, self, lazyStats)
+ .withType(localDummy.nonMemberTermRef))
+ }
+
+ /** Skip past any leading top-level IMPORT or PACKAGE trees, leaving the
+ * reader positioned at the first other top-level tree or at end of input.
+ */
+ def skipToplevel()(implicit ctx: Context): Unit = {
+ // Iterative form of the skip: keep dropping trees while the next tag
+ // is IMPORT or PACKAGE and input remains.
+ def atSkippable = !isAtEnd && (nextByte == IMPORT || nextByte == PACKAGE)
+ while (atSkippable) skipTree()
+ }
+
+ /** Read the sequence of top-level IMPORT and PACKAGE statements into a list.
+ * Reading stops at end of input or at the first tree that is neither an
+ * import nor a package (such trees are not part of the top level).
+ */
+ def readTopLevel()(implicit ctx: Context): List[Tree] = {
+ val topStats = new ListBuffer[tpd.Tree]
+ var continue = nextByte == IMPORT || nextByte == PACKAGE
+ while (continue) {
+ topStats += readIndexedStat(NoSymbol)
+ continue = !isAtEnd && (nextByte == IMPORT || nextByte == PACKAGE)
+ }
+ topStats.toList
+ }
+
+ /** Read one statement whose symbols were already created during indexing.
+ * Non-definition statements are read as terms owned by `exprOwner`.
+ */
+ def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
+ case TYPEDEF | VALDEF | DEFDEF =>
+ readIndexedDef()
+ case IMPORT =>
+ readImport()
+ case PACKAGE =>
+ val start = currentAddr
+ processPackage { (pid, end) => implicit ctx =>
+ setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
+ }
+ case _ =>
+ readTerm()(ctx.withOwner(exprOwner))
+ }
+
+ /** Read an IMPORT tree: the qualifier expression followed by a list of
+ * selectors. A renamed selector (IMPORTED then RENAMED) becomes a
+ * `Thicket(from, to)`; a plain one is just the `from` ident.
+ */
+ def readImport()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ readByte() // consume the IMPORT tag
+ readEnd()
+ val expr = readTerm()
+ def readSelectors(): List[untpd.Tree] = nextByte match {
+ case IMPORTED =>
+ val start = currentAddr
+ readByte()
+ val from = setPos(start, untpd.Ident(readName()))
+ nextByte match {
+ case RENAMED =>
+ val start2 = currentAddr
+ readByte()
+ val to = setPos(start2, untpd.Ident(readName()))
+ untpd.Thicket(from, to) :: readSelectors()
+ case _ =>
+ from :: readSelectors()
+ }
+ case _ =>
+ Nil
+ }
+ setPos(start, Import(expr, readSelectors()))
+ }
+
+ /** Read statements up to `end`, assuming their symbols were already indexed. */
+ def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
+ until(end)(readIndexedStat(exprOwner))
+
+ /** Index symbols for the statements up to `end` (using a forked reader so
+ * the current position is preserved), then read the statements.
+ */
+ def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
+ fork.indexStats(end)
+ readIndexedStats(exprOwner, end)
+ }
+
+ /** Read consecutive parameter definitions with the given `tag`, assuming
+ * their symbols were already indexed.
+ */
+ def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
+ collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
+
+ /** Index then read consecutive parameter definitions with the given `tag`. */
+ def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = {
+ fork.indexParams(tag)
+ readIndexedParams(tag)
+ }
+
+// ------ Reading trees -----------------------------------------------------
+
+ /** Read a tree (term or type tree) starting at the current address.
+ * Trees with tag < firstLengthTreeTag are "simple" (no end address);
+ * the rest carry an explicit end address and are checked against it.
+ * The resulting tree's type is simplified and its position set from `start`.
+ */
+ def readTerm()(implicit ctx: Context): Tree = { // TODO: rename to readTree
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading term ${astTagToString(tag)} at $start")
+
+ // Fallback: re-read the current bytes as a type and wrap it in a tree.
+ def readPathTerm(): Tree = {
+ goto(start)
+ readType() match {
+ case path: TypeRef => TypeTree(path)
+ case path: TermRef => ref(path)
+ case path: ThisType => This(path.cls)
+ case path: ConstantType => Literal(path.value)
+ }
+ }
+
+ // Read the qualifier of a selection and build the Select, computing its
+ // type with `tpf` from the (stabilized) qualifier type.
+ def completeSelect(name: Name, tpf: Type => Type): Select = {
+ val localCtx =
+ if (name == nme.CONSTRUCTOR) ctx.addMode(Mode.InSuperCall) else ctx
+ val qual = readTerm()(localCtx)
+ val unshadowed = if (name.isShadowedName) name.revertShadowed else name
+ untpd.Select(qual, unshadowed).withType(tpf(qual.tpe.widenIfUnstable))
+ }
+
+ def readQualId(): (untpd.Ident, TypeRef) = {
+ val qual = readTerm().asInstanceOf[untpd.Ident]
+ (untpd.Ident(qual.name).withPos(qual.pos), qual.tpe.asInstanceOf[TypeRef])
+ }
+
+ def readSimpleTerm(): Tree = tag match {
+ case SHARED =>
+ // Shared subtree: follow the address reference and read there.
+ forkAt(readAddr()).readTerm()
+ case IDENT =>
+ untpd.Ident(readName()).withType(readType())
+ case IDENTtpt =>
+ untpd.Ident(readName().toTypeName).withType(readType())
+ case SELECT =>
+ def readRest(name: Name, sig: Signature) =
+ completeSelect(name, TermRef.withSig(_, name.asTermName, sig))
+ readNameSplitSig match {
+ case name: Name => readRest(name, Signature.NotAMethod)
+ case (name: Name, sig: Signature) => readRest(name, sig)
+ }
+ case SELECTtpt =>
+ val name = readName().toTypeName
+ completeSelect(name, TypeRef(_, name))
+ case QUALTHIS =>
+ val (qual, tref) = readQualId()
+ untpd.This(qual).withType(ThisType.raw(tref))
+ case NEW =>
+ New(readTpt())
+ case SINGLETONtpt =>
+ SingletonTypeTree(readTerm())
+ case BYNAMEtpt =>
+ ByNameTypeTree(readTpt())
+ case _ =>
+ readPathTerm()
+ }
+
+ def readLengthTerm(): Tree = {
+ val end = readEnd()
+
+ // Fresh scope; if the owner is a class, statements get a local dummy owner.
+ def localNonClassCtx = {
+ val ctx1 = ctx.fresh.setNewScope
+ if (ctx.owner.isClass) ctx1.setOwner(ctx1.newLocalDummy(ctx.owner)) else ctx1
+ }
+
+ // Read a block: fork past the stats to read the final expression,
+ // with both read in the same local context.
+ def readBlock(mkTree: (List[Tree], Tree) => Tree): Tree = {
+ val exprReader = fork
+ skipTree()
+ val localCtx = localNonClassCtx
+ val stats = readStats(ctx.owner, end)(localCtx)
+ val expr = exprReader.readTerm()(localCtx)
+ mkTree(stats, expr)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPER =>
+ val qual = readTerm()
+ val (mixId, mixTpe) = ifBefore(end)(readQualId(), (untpd.EmptyTypeIdent, NoType))
+ tpd.Super(qual, mixId, ctx.mode.is(Mode.InSuperCall), mixTpe.typeSymbol)
+ case APPLY =>
+ val fn = readTerm()
+ val isJava = fn.symbol.is(JavaDefined)
+ // Sequence arguments to Java methods are re-wrapped as Java seq literals.
+ def readArg() = readTerm() match {
+ case SeqLiteral(elems, elemtpt) if isJava =>
+ JavaSeqLiteral(elems, elemtpt)
+ case arg => arg
+ }
+ tpd.Apply(fn, until(end)(readArg()))
+ case TYPEAPPLY =>
+ tpd.TypeApply(readTerm(), until(end)(readTpt()))
+ case TYPED =>
+ val expr = readTerm()
+ val tpt = readTpt()
+ val expr1 = expr match {
+ case SeqLiteral(elems, elemtpt) if tpt.tpe.isRef(defn.ArrayClass) =>
+ JavaSeqLiteral(elems, elemtpt)
+ case expr => expr
+ }
+ Typed(expr1, tpt)
+ case NAMEDARG =>
+ NamedArg(readName(), readTerm())
+ case ASSIGN =>
+ Assign(readTerm(), readTerm())
+ case BLOCK =>
+ readBlock(Block)
+ case INLINED =>
+ val call = readTerm()
+ readBlock((defs, expr) => Inlined(call, defs.asInstanceOf[List[MemberDef]], expr))
+ case IF =>
+ If(readTerm(), readTerm(), readTerm())
+ case LAMBDA =>
+ val meth = readTerm()
+ val tpt = ifBefore(end)(readTpt(), EmptyTree)
+ Closure(Nil, meth, tpt)
+ case MATCH =>
+ Match(readTerm(), readCases(end))
+ case RETURN =>
+ val from = readSymRef()
+ val expr = ifBefore(end)(readTerm(), EmptyTree)
+ Return(expr, Ident(from.termRef))
+ case TRY =>
+ Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))
+ case REPEATED =>
+ val elemtpt = readTpt()
+ SeqLiteral(until(end)(readTerm()), elemtpt)
+ case BIND =>
+ // Bind symbols are created here on the fly, not during indexing.
+ val name = readName()
+ val info = readType()
+ val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, info)
+ registerSym(start, sym)
+ Bind(sym, readTerm())
+ case ALTERNATIVE =>
+ Alternative(until(end)(readTerm()))
+ case UNAPPLY =>
+ val fn = readTerm()
+ val implicitArgs =
+ collectWhile(nextByte == IMPLICITarg) {
+ readByte()
+ readTerm()
+ }
+ val patType = readType()
+ val argPats = until(end)(readTerm())
+ UnApply(fn, implicitArgs, argPats, patType)
+ case REFINEDtpt =>
+ // Refinements are modeled with a fresh refinement class symbol whose
+ // type ref is registered before reading, so self-references resolve.
+ val refineCls = ctx.newCompleteClassSymbol(
+ ctx.owner, tpnme.REFINE_CLASS, Fresh, parents = Nil)
+ typeAtAddr(start) = refineCls.typeRef
+ val parent = readTpt()
+ val refinements = readStats(refineCls, end)(localContext(refineCls))
+ RefinedTypeTree(parent, refinements, refineCls)
+ case APPLIEDtpt =>
+ AppliedTypeTree(readTpt(), until(end)(readTpt()))
+ case ANDtpt =>
+ AndTypeTree(readTpt(), readTpt())
+ case ORtpt =>
+ OrTypeTree(readTpt(), readTpt())
+ case ANNOTATEDtpt =>
+ Annotated(readTpt(), readTerm())
+ case POLYtpt =>
+ val localCtx = localNonClassCtx
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val body = readTpt()(localCtx)
+ PolyTypeTree(tparams, body)
+ case TYPEBOUNDStpt =>
+ TypeBoundsTree(readTpt(), readTpt())
+ case _ =>
+ readPathTerm()
+ }
+ // Every length-prefixed tree must consume exactly its pickled extent.
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
+ tree.overwriteType(tree.tpe.simplified)
+ setPos(start, tree)
+ }
+
+ /** Read a type tree: either a full tree (if the next unshared tag is a
+ * type-tree tag) or a pickled type wrapped in a TypeTree.
+ */
+ def readTpt()(implicit ctx: Context) =
+ if (isTypeTreeTag(nextUnsharedTag)) readTerm()
+ else {
+ val start = currentAddr
+ val tp = readType()
+ if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree
+ }
+
+ /** Read consecutive CASEDEF trees up to `end`, each in a fresh scope. */
+ def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
+ collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) }
+
+ /** Read a single CASEDEF. Note the pickled order is pattern, rhs, then
+ * optional guard, while the constructed tree is CaseDef(pat, guard, rhs).
+ */
+ def readCase()(implicit ctx: Context): CaseDef = {
+ val start = currentAddr
+ readByte()
+ val end = readEnd()
+ val pat = readTerm()
+ val rhs = readTerm()
+ val guard = ifBefore(end)(readTerm(), EmptyTree)
+ setPos(start, CaseDef(pat, guard, rhs))
+ }
+
+ /** Defer reading with `op` by capturing a forked reader at the current
+ * position; the main reader skips to `end`.
+ */
+ def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
+ val localReader = fork
+ goto(end)
+ new LazyReader(localReader, op)
+ }
+
+// ------ Setting positions ------------------------------------------------
+
+ /** Set position of `tree` at given `addr`.
+ * A no-op unless the context has Mode.ReadPositions and a position
+ * unpickler is available; missing positions leave the tree untouched.
+ */
+ def setPos[T <: untpd.Tree](addr: Addr, tree: T)(implicit ctx: Context): tree.type =
+ if (ctx.mode.is(Mode.ReadPositions)) {
+ posUnpicklerOpt match {
+ case Some(posUnpickler) =>
+ //println(i"setPos $tree / ${tree.getClass} at $addr to ${posUnpickler.posAt(addr)}")
+ val pos = posUnpickler.posAt(addr)
+ if (pos.exists) tree.setPosUnchecked(pos)
+ tree
+ case _ =>
+ //println(i"no pos $tree")
+ tree
+ }
+ }
+ else tree
+ }
+
+ /** A lazy tree computation that runs `op` on the captured `reader` when
+ * completed, no later than the pickler phase.
+ */
+ class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] {
+ def complete(implicit ctx: Context): T = {
+ pickling.println(i"starting to read at ${reader.reader.currentAddr}")
+ op(reader)(ctx.addMode(Mode.AllowDependentFunctions).withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+ /** A lazy annotation whose argument tree is read from `reader` on demand,
+ * no later than the pickler phase.
+ */
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader) extends LazyAnnotation(sym) {
+ def complete(implicit ctx: Context) = {
+ reader.readTerm()(ctx.withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+ /** A lazy datastructure that records how definitions are nested in TASTY data.
+ * The structure is lazy because it needs to be computed only for forward references
+ * to symbols that happen before the referenced symbol is created (see `symbolAt`).
+ * Such forward references are rare.
+ *
+ * @param addr The address of tree representing an owning definition, NoAddr for root tree
+ * @param tag The tag at `addr`. Used to determine which subtrees to scan for children
+ * (i.e. if `tag` is template, don't scan member defs, as these belong already
+ * to enclosing class).
+ * @param reader The reader to be used for scanning for children
+ * @param end The end of the owning definition
+ */
+ class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr) {
+
+ /** All definitions that have the definition at `addr` as closest enclosing definition */
+ lazy val children: List[OwnerTree] = {
+ val buf = new ListBuffer[OwnerTree]
+ // In a TEMPLATE, member defs already belong to the enclosing class,
+ // so only non-member statements are scanned for children.
+ reader.scanTrees(buf, end, if (tag == TEMPLATE) NoMemberDefs else AllDefs)
+ buf.toList
+ }
+
+ /** Find the owner of definition at `addr` */
+ def findOwner(addr: Addr)(implicit ctx: Context): Symbol = {
+ // Descend into the child whose [addr, end) span contains `addr`;
+ // `current` is the owner candidate established at the enclosing level.
+ def search(cs: List[OwnerTree], current: Symbol): Symbol =
+ try cs match {
+ case ot :: cs1 =>
+ if (ot.addr.index == addr.index)
+ current
+ else if (ot.addr.index < addr.index && addr.index < ot.end.index)
+ search(ot.children, reader.symbolAt(ot.addr))
+ else
+ search(cs1, current)
+ case Nil =>
+ throw new TreeWithoutOwner
+ }
+ catch {
+ case ex: TreeWithoutOwner =>
+ println(i"no owner for $addr among $cs") // DEBUG
+ throw ex
+ }
+ search(children, NoSymbol)
+ }
+
+ // Fix: the interpolated string was missing its closing parenthesis.
+ override def toString = s"OwnerTree(${addr.index}, ${end.index})"
+ }
+}
+
+object TreeUnpickler {
+
+ /** An enumeration indicating which subtrees should be added to an OwnerTree.
+ * Modeled as plain Ints so they can be compared cheaply during scanning.
+ */
+ type MemberDefMode = Int
+ final val MemberDefsOnly = 0 // add only member defs; skip other statements
+ final val NoMemberDefs = 1 // add only statements that are not member defs
+ final val AllDefs = 2 // add everything
+
+ /** Signals that `OwnerTree.findOwner` found no enclosing definition. */
+ class TreeWithoutOwner extends Exception
+}
+
+
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
new file mode 100644
index 000000000..17fef3852
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
@@ -0,0 +1,299 @@
+package dotty.tools
+package dotc
+package core
+package unpickleScala2
+
+import Flags._
+
+/** Variable length byte arrays, with methods for basic pickling and unpickling.
+ *
+ * @param data The initial buffer
+ * @param from The first index where defined data are found
+ * @param to The first index where new data can be written
+ */
+class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
+
+ var bytes = data
+ var readIndex = from
+ var writeIndex = to
+
+ /** Double bytes array */
+ private def dble(): Unit = {
+ val bytes1 = new Array[Byte](bytes.length * 2)
+ Array.copy(bytes, 0, bytes1, 0, writeIndex)
+ bytes = bytes1
+ }
+
+ /** Grow the buffer until it can hold `capacity` more bytes. */
+ def ensureCapacity(capacity: Int) =
+ while (bytes.length < writeIndex + capacity) dble()
+
+ // -- Basic output routines --------------------------------------------
+
+ /** Write a byte of data */
+ def writeByte(b: Int): Unit = {
+ if (writeIndex == bytes.length) dble()
+ bytes(writeIndex) = b.toByte
+ writeIndex += 1
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeNat(x: Int): Unit =
+ // Mask to treat the Int's bit pattern as an unsigned 32-bit value.
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
+
+ /**
+ * Like writeNat, but for longs. This is not the same as
+ * writeLong, which writes in base 256. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range Int.MIN_VALUE to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long): Unit = {
+ // Recursively emit higher-order 7-bit groups with the continuation bit set.
+ def writeNatPrefix(x: Long): Unit = {
+ val y = x >>> 7
+ if (y != 0L) writeNatPrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writeNatPrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+
+ /** Write a natural number <code>x</code> at position <code>pos</code>.
+ * If number is more than one byte, shift rest of array to make space.
+ *
+ * @param pos the byte position at which to patch in `x`
+ * @param x the natural number to write
+ */
+ def patchNat(pos: Int, x: Int): Unit = {
+ // Each prefix byte shifts the tail of the buffer right by one.
+ def patchNatPrefix(x: Int): Unit = {
+ writeByte(0)
+ Array.copy(bytes, pos, bytes, pos + 1, writeIndex - (pos + 1))
+ bytes(pos) = ((x & 0x7f) | 0x80).toByte
+ val y = x >>> 7
+ if (y != 0) patchNatPrefix(y)
+ }
+ bytes(pos) = (x & 0x7f).toByte
+ val y = x >>> 7
+ if (y != 0) patchNatPrefix(y)
+ }
+
+ /** Write a long number <code>x</code> in signed big endian format, base 256.
+ *
+ * @param x The long number to be written.
+ */
+ def writeLong(x: Long): Unit = {
+ val y = x >> 8
+ val z = x & 0xff
+ // Stop once the remaining high bits are pure sign extension of this byte.
+ if (-y != (z >> 7)) writeLong(y)
+ writeByte(z.toInt)
+ }
+
+ // -- Basic input routines --------------------------------------------
+
+ /** Peek at the current byte without moving the read index */
+ def peekByte(): Int = bytes(readIndex)
+
+ /** Read a byte. Note: the Byte is widened to Int, so values >= 0x80
+ * come back negative (sign-extended); callers mask where needed.
+ */
+ def readByte(): Int = {
+ val x = bytes(readIndex); readIndex += 1; x
+ }
+
+ /** Read a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.*/
+ def readNat(): Int = readLongNat().toInt
+
+ def readLongNat(): Long = {
+ var b = 0L
+ var x = 0L
+ // Accumulate 7-bit groups while the continuation bit (0x80) is set.
+ do {
+ b = readByte()
+ x = (x << 7) + (b & 0x7f)
+ } while ((b & 0x80) != 0L)
+ x
+ }
+
+ /** Read a long number in signed big endian format, base 256. */
+ def readLong(len: Int): Long = {
+ var x = 0L
+ var i = 0
+ while (i < len) {
+ x = (x << 8) + (readByte() & 0xff)
+ i += 1
+ }
+ // Shift left then arithmetic-shift right to sign-extend the top byte.
+ val leading = 64 - (len << 3)
+ x << leading >> leading
+ }
+
+ /** Returns the buffer as a sequence of (Int, Array[Byte]) representing
+ * (tag, data) of the individual entries. Saves and restores buffer state.
+ */
+
+ def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = {
+ val saved = readIndex
+ readIndex = 0
+ readNat() ; readNat() // discarding version
+ val result = new Array[(Int, Array[Byte])](readNat())
+
+ result.indices foreach { index =>
+ val tag = readNat()
+ val len = readNat()
+ val bytes = data.slice(readIndex, len + readIndex)
+ readIndex += len
+
+ result(index) = tag -> bytes
+ }
+
+ readIndex = saved
+ result.toIndexedSeq
+ }
+
+ /** Perform operation <code>op</code> until the condition
+ * <code>readIndex == end</code> is satisfied.
+ * Concatenate results into a list.
+ *
+ * @param end the read index at which to stop
+ * @param op the operation to repeat
+ * @return the accumulated results, in read order
+ */
+ def until[T](end: Int, op: () => T): List[T] =
+ if (readIndex == end) List() else op() :: until(end, op)
+
+ /** Perform operation <code>op</code> the number of
+ * times specified. Concatenate the results into a list.
+ */
+ def times[T](n: Int, op: ()=>T): List[T] =
+ if (n == 0) List() else op() :: times(n-1, op)
+
+ /** Pickle = majorVersion_Nat minorVersion_Nat nbEntries_Nat {Entry}
+ * Entry = type_Nat length_Nat [actual entries]
+ *
+ * Assumes that the ..Version_Nat are already consumed.
+ *
+ * @return an array mapping entry numbers to locations in
+ * the byte array where the entries start.
+ */
+ def createIndex: Array[Int] = {
+ val index = new Array[Int](readNat()) // nbEntries_Nat
+ for (i <- 0 until index.length) {
+ index(i) = readIndex
+ readByte() // skip type_Nat
+ readIndex = readNat() + readIndex // read length_Nat, jump to next entry
+ }
+ index
+ }
+}
+
+object PickleBuffer {
+
+ private final val ScalaFlagEnd = 48
+ private final val ChunkBits = 8
+ private final val ChunkSize = 1 << ChunkBits
+ // A FlagMap translates ChunkBits-wide slices of a Scala 2 flag word to
+ // Dotty flag bits: map(chunkIndex)(chunkValue) == combined Dotty bits.
+ private type FlagMap = Array[Array[Long]]
+
+ private val (scalaTermFlagMap, scalaTypeFlagMap) = {
+ import scala.reflect.internal.Flags._
+
+ // The following vals are copy-pasted from reflect.internal.Flags.
+ // They are unfortunately private there, so we cannot get at them directly.
+ // Using the public method pickledToRawFlags instead looks unattractive
+ // because of performance.
+ val IMPLICIT_PKL = (1 << 0)
+ val FINAL_PKL = (1 << 1)
+ val PRIVATE_PKL = (1 << 2)
+ val PROTECTED_PKL = (1 << 3)
+ val SEALED_PKL = (1 << 4)
+ val OVERRIDE_PKL = (1 << 5)
+ val CASE_PKL = (1 << 6)
+ val ABSTRACT_PKL = (1 << 7)
+ val DEFERRED_PKL = (1 << 8)
+ val METHOD_PKL = (1 << 9)
+ val MODULE_PKL = (1 << 10)
+ val INTERFACE_PKL = (1 << 11)
+
+ // Correspondence from a single Scala 2 pickled flag bit to Dotty flags.
+ // A pair value means (flags for terms, flags for types); a single value
+ // applies to both, restricted to the respective kind below.
+ val corr = Map(
+ PROTECTED_PKL -> Protected,
+ OVERRIDE_PKL -> Override,
+ PRIVATE_PKL -> Private,
+ ABSTRACT_PKL -> Abstract,
+ DEFERRED_PKL -> Deferred,
+ FINAL_PKL -> Final,
+ METHOD_PKL -> Method,
+ INTERFACE_PKL -> NoInitsInterface,
+ MODULE_PKL -> (Module | Lazy, Module),
+ IMPLICIT_PKL -> Implicit,
+ SEALED_PKL -> Sealed,
+ CASE_PKL -> Case,
+ MUTABLE -> Mutable,
+ PARAM -> Param,
+ PACKAGE -> Package,
+ MACRO -> Macro,
+ BYNAMEPARAM -> (Method, Covariant),
+ LABEL -> (Label, Contravariant),
+ ABSOVERRIDE -> AbsOverride,
+ LOCAL -> Local,
+ JAVA -> JavaDefined,
+ SYNTHETIC -> Synthetic,
+ STABLE -> Stable,
+ STATIC -> JavaStatic,
+ CASEACCESSOR -> CaseAccessor,
+ DEFAULTPARAM -> (DefaultParameterized, Trait),
+ BRIDGE -> Bridge,
+ ACCESSOR -> Accessor,
+ SUPERACCESSOR -> SuperAccessor,
+ PARAMACCESSOR -> ParamAccessor,
+ MODULEVAR -> Scala2ModuleVar,
+ LAZY -> Lazy,
+ MIXEDIN -> (MixedIn, Scala2Existential),
+ EXPANDEDNAME -> ExpandedName,
+ IMPLCLASS -> (Scala2PreSuper, ImplClass),
+ SPECIALIZED -> Specialized,
+ VBRIDGE -> VBridge,
+ VARARGS -> JavaVarargs,
+ ENUM -> Enum)
+
+ // generate initial maps from Scala flags to Dotty flags
+ val termMap, typeMap = new Array[Long](64)
+ for (idx <- 0 until ScalaFlagEnd)
+ corr get (1L << idx) match {
+ case Some((termFlag: FlagSet, typeFlag: FlagSet)) =>
+ termMap(idx) |= termFlag.bits
+ typeMap(idx) |= typeFlag.bits
+ case Some(commonFlag: FlagSet) =>
+ termMap(idx) |= commonFlag.toTermFlags.bits
+ typeMap(idx) |= commonFlag.toTypeFlags.bits
+ case _ =>
+ }
+
+ // Convert map so that it maps chunks of ChunkBits size at once
+ // instead of single bits.
+ def chunkMap(xs: Array[Long]): FlagMap = {
+ val chunked = Array.ofDim[Long](
+ (xs.length + ChunkBits - 1) / ChunkBits, ChunkSize)
+ for (i <- 0 until chunked.length)
+ for (j <- 0 until ChunkSize)
+ for (k <- 0 until ChunkBits)
+ if ((j & (1 << k)) != 0)
+ chunked(i)(j) |= xs(i * ChunkBits + k)
+ chunked
+ }
+
+ (chunkMap(termMap), chunkMap(typeMap))
+ }
+
+ /** Translate a Scala 2 pickled flag word into a Dotty FlagSet,
+ * using the term or type table depending on `isType`.
+ * The word is processed ChunkBits bits at a time via the chunked tables.
+ */
+ def unpickleScalaFlags(sflags: Long, isType: Boolean): FlagSet = {
+ val map: FlagMap = if (isType) scalaTypeFlagMap else scalaTermFlagMap
+ val shift = ChunkBits
+ val mask = ChunkSize - 1
+ assert(6 * ChunkBits == ScalaFlagEnd)
+ FlagSet(
+ map(0)((sflags >>> (shift * 0)).toInt & mask) |
+ map(1)((sflags >>> (shift * 1)).toInt & mask) |
+ map(2)((sflags >>> (shift * 2)).toInt & mask) |
+ map(3)((sflags >>> (shift * 3)).toInt & mask) |
+ map(4)((sflags >>> (shift * 4)).toInt & mask) |
+ map(5)((sflags >>> (shift * 5)).toInt & mask)
+ )
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
new file mode 100644
index 000000000..b01f6cc6a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -0,0 +1,1260 @@
+package dotty.tools
+package dotc
+package core
+package unpickleScala2
+
+import java.io.IOException
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+
+import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
+import StdNames._, Denotations._, NameOps._, Flags._, Constants._, Annotations._
+import dotty.tools.dotc.typer.ProtoTypes.{FunProtoTyped, FunProto}
+import util.Positions._
+import dotty.tools.dotc.ast.{tpd, Trees, untpd}, ast.tpd._
+import ast.untpd.Modifiers
+import printing.Texts._
+import printing.Printer
+import io.AbstractFile
+import util.common._
+import typer.Checking.checkNonCyclic
+import PickleBuffer._
+import scala.reflect.internal.pickling.PickleFormat._
+import Decorators._
+import TypeApplications._
+import classfile.ClassfileParser
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import scala.annotation.switch
+
+object Scala2Unpickler {
+
+  /** Exception thrown if classfile is corrupted */
+  class BadSignature(msg: String) extends RuntimeException(msg)
+
+  /** Temporary wrapper for a polymorphic type read from a pickle; it is
+   *  converted into a real lambda-abstracted type by `translateTempPoly`.
+   */
+  case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType {
+    override def fallbackToText(printer: Printer): Text =
+      "[" ~ printer.dclsText(tparams, ", ") ~ "]" ~ printer.toText(tpe)
+  }
+
+  /** Temporary type for classinfos, will be decomposed on completion of the class */
+  case class TempClassInfoType(parentTypes: List[Type], decls: Scope, clazz: Symbol) extends UncachedGroundType
+
+  /** Convert temp poly type to poly type and leave other types alone. */
+  def translateTempPoly(tp: Type)(implicit ctx: Context): Type = tp match {
+    case TempPolyType(tparams, restpe) => restpe.LambdaAbstract(tparams)
+    case tp => tp
+  }
+
+  /** Lambda-abstract a constructor's info over the type parameters of its owner class. */
+  def addConstructorTypeParams(denot: SymDenotation)(implicit ctx: Context) = {
+    assert(denot.isConstructor)
+    denot.info = denot.info.LambdaAbstract(denot.owner.typeParams)
+  }
+
+  /** Convert array parameters denoting a repeated parameter of a Java method
+   *  to `RepeatedParamClass` types.
+   */
+  def arrayToRepeated(tp: Type)(implicit ctx: Context): Type = tp match {
+    case tp @ MethodType(paramNames, paramTypes) =>
+      // the last parameter is required to be an Array[_]
+      val lastArg = paramTypes.last
+      assert(lastArg isRef defn.ArrayClass)
+      val elemtp0 :: Nil = lastArg.baseArgInfos(defn.ArrayClass)
+      val elemtp = elemtp0 match {
+        case AndType(t1, t2) if t1.typeSymbol.isAbstractType && (t2 isRef defn.ObjectClass) =>
+          t1 // drop intersection with Object for abstract types in varargs. UnCurry can handle them.
+        case _ =>
+          elemtp0
+      }
+      tp.derivedMethodType(
+        paramNames,
+        paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp),
+        tp.resultType)
+    case tp: PolyType =>
+      // recurse into the result type of polymorphic methods
+      tp.derivedPolyType(tp.paramNames, tp.paramBounds, arrayToRepeated(tp.resultType))
+  }
+
+  /** Enter a synthetic default constructor into `scope` unless one is already present. */
+  def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context) =
+    if (scope.lookup(nme.CONSTRUCTOR) == NoSymbol) {
+      val constr = ctx.newDefaultConstructor(cls)
+      addConstructorTypeParams(constr)
+      cls.enter(constr, scope)
+    }
+
+  /** Install the class info of `denot` from the unpickled `info`: enter type
+   *  parameters into the decls, ensure a constructor exists, register
+   *  companion links with the scalac-linked class, and finalize the
+   *  temporary class info.
+   *  @param selfInfo the unpickled self type, or NoType if none was pickled
+   */
+  def setClassInfo(denot: ClassDenotation, info: Type, selfInfo: Type = NoType)(implicit ctx: Context): Unit = {
+    val cls = denot.classSymbol
+    val (tparams, TempClassInfoType(parents, decls, clazz)) = info match {
+      case TempPolyType(tps, cinfo) => (tps, cinfo)
+      case cinfo => (Nil, cinfo)
+    }
+    val ost =
+      if ((selfInfo eq NoType) && (denot is ModuleClass) && denot.sourceModule.exists)
+        // it seems sometimes the source module does not exist for a module class.
+        // An example is `scala.reflect.internal.Trees.Template$. Without the
+        // `denot.sourceModule.exists` provision i859.scala crashes in the backend.
+        denot.owner.thisType select denot.sourceModule
+      else selfInfo
+    val tempInfo = new TempClassInfo(denot.owner.thisType, denot.classSymbol, decls, ost)
+    denot.info = tempInfo // first rough info to avoid CyclicReferences
+    var parentRefs = ctx.normalizeToClassRefs(parents, cls, decls)
+    if (parentRefs.isEmpty) parentRefs = defn.ObjectType :: Nil
+    for (tparam <- tparams) {
+      val tsym = decls.lookup(tparam.name)
+      if (tsym.exists) tsym.setFlag(TypeParam)
+      else denot.enter(tparam, decls)
+    }
+    // no synthetic constructor for Java static parts (JavaModule)
+    if (!(denot.flagsUNSAFE is JavaModule)) ensureConstructor(denot.symbol.asClass, decls)
+
+    val scalacCompanion = denot.classSymbol.scalacLinkedClass
+
+    // link `module` and `claz` to each other via companion methods
+    def registerCompanionPair(module: Symbol, claz: Symbol) = {
+      import transform.SymUtils._
+      module.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, claz)
+      if (claz.isClass) {
+        claz.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, module)
+      }
+    }
+
+    if (denot.flagsUNSAFE is Module) {
+      registerCompanionPair(denot.classSymbol, scalacCompanion)
+    } else {
+      registerCompanionPair(scalacCompanion, denot.classSymbol)
+    }
+
+    tempInfo.finalize(denot, parentRefs) // install final info, except possibly for typeparams ordering
+    denot.ensureTypeParamsInCorrectOrder()
+  }
+}
+
+/** Unpickle symbol table information descending from a class and/or module root
+ *  from an array of bytes.
+ *  @param bytes           bytearray from which we unpickle
+ *  @param classRoot       the denotation of the top-level class which is unpickled
+ *  @param moduleClassRoot the denotation of the top-level module class which is unpickled
+ *  (the file named in error messages is derived from these roots, see `source`)
+ */
+class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot: ClassDenotation)(ictx: Context)
+ extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded {
+
+ def showPickled() = {
+ atReadPos(0, () => {
+ println(s"classRoot = ${classRoot.debugString}, moduleClassRoot = ${moduleClassRoot.debugString}")
+ util.ShowPickled.printFile(this)
+ })
+ }
+
+ // print("unpickling "); showPickled() // !!! DEBUG
+
+ import Scala2Unpickler._
+
+ val moduleRoot = moduleClassRoot.sourceModule(ictx).denot(ictx)
+ assert(moduleRoot.isTerm)
+
+ checkVersion(ictx)
+
+ private val loadingMirror = defn(ictx) // was: mirrorThatLoaded(classRoot)
+
+ /** A map from entry numbers to array offsets */
+ private val index = createIndex
+
+ /** A map from entry numbers to symbols, types, or annotations */
+ private val entries = new Array[AnyRef](index.length)
+
+ /** A map from symbols to their associated `decls` scopes */
+ private val symScopes = mutable.AnyRefMap[Symbol, Scope]()
+
+  /** Report a corrupted signature: throw a `BadSignature` naming the root and
+   *  read position; the original failure's stack trace is printed first
+   *  (currently unconditionally, see the comment on the condition).
+   */
+  protected def errorBadSignature(msg: String, original: Option[RuntimeException] = None)(implicit ctx: Context) = {
+    val ex = new BadSignature(
+      i"""error reading Scala signature of $classRoot from $source:
+         |error occurred at position $readIndex: $msg""")
+    if (ctx.debug || true) original.getOrElse(ex).printStackTrace() // temporarily enable printing of original failure signature to debug failing builds
+    throw ex
+  }
+
+  /** Rethrow `BadSignature`s unchanged; wrap any other runtime exception into one. */
+  protected def handleRuntimeException(ex: RuntimeException)(implicit ctx: Context) = ex match {
+    case ex: BadSignature => throw ex
+    case _ => errorBadSignature(s"a runtime exception occurred: $ex", Some(ex))
+  }
+
+  /** Unpickle the whole table. First pass: read every symbol entry, forcing
+   *  class unpicklers to load their type params eagerly (see `ClassUnpickler.init`).
+   *  Second pass: read symbol annotations and children entries.
+   *  Runtime exceptions are funneled through `handleRuntimeException`.
+   */
+  def run()(implicit ctx: Context) =
+    try {
+      var i = 0
+      while (i < index.length) {
+        if (entries(i) == null && isSymbolEntry(i)) {
+          val savedIndex = readIndex
+          readIndex = index(i)
+          val sym = readSymbol()
+          entries(i) = sym
+          sym.infoOrCompleter match {
+            case info: ClassUnpickler => info.init()
+            case _ =>
+          }
+          readIndex = savedIndex
+        }
+        i += 1
+      }
+      // read children last, fix for #3951
+      i = 0
+      while (i < index.length) {
+        if (entries(i) == null) {
+          if (isSymbolAnnotationEntry(i)) {
+            val savedIndex = readIndex
+            readIndex = index(i)
+            readSymbolAnnotation()
+            readIndex = savedIndex
+          } else if (isChildrenEntry(i)) {
+            val savedIndex = readIndex
+            readIndex = index(i)
+            readChildren()
+            readIndex = savedIndex
+          }
+        }
+        i += 1
+      }
+    } catch {
+      case ex: RuntimeException => handleRuntimeException(ex)
+    }
+
+  /** The file associated with the roots being unpickled, used in diagnostics:
+   *  the class root's associated file if non-null, else the module class root's.
+   */
+  def source(implicit ctx: Context): AbstractFile = {
+    val f = classRoot.symbol.associatedFile
+    if (f != null) f else moduleClassRoot.symbol.associatedFile
+  }
+
+  /** Check the pickle's version header: the major version must match
+   *  `MajorVersion` exactly and the minor version must not exceed
+   *  `MinorVersion`, otherwise an `IOException` is thrown.
+   */
+  private def checkVersion(implicit ctx: Context): Unit = {
+    val major = readNat()
+    val minor = readNat()
+    if (major != MajorVersion || minor > MinorVersion)
+      throw new IOException("Scala signature " + classRoot.fullName.decode +
+        " has wrong version\n expected: " +
+        MajorVersion + "." + MinorVersion +
+        "\n found: " + major + "." + minor +
+        " in " + source)
+  }
+
+  /** The `decls` scope associated with given symbol; created lazily on first access. */
+  protected def symScope(sym: Symbol) = symScopes.getOrElseUpdate(sym, newScope)
+
+  /** Does entry represent an (internal) symbol?
+   *  Refinement classes also carry tag CLASSsym but are excluded here.
+   */
+  protected def isSymbolEntry(i: Int)(implicit ctx: Context): Boolean = {
+    val tag = bytes(index(i)).toInt
+    (firstSymTag <= tag && tag <= lastSymTag &&
+     (tag != CLASSsym || !isRefinementSymbolEntry(i)))
+  }
+
+  /** Does entry represent an (internal or external) symbol? */
+  protected def isSymbolRef(i: Int): Boolean = {
+    val tag = bytes(index(i))
+    (firstSymTag <= tag && tag <= lastExtSymTag)
+  }
+
+  /** Does entry represent a name (term or type)? */
+  protected def isNameEntry(i: Int): Boolean = {
+    val tag = bytes(index(i)).toInt
+    tag == TERMname || tag == TYPEname
+  }
+
+  /** Does entry represent a symbol annotation? */
+  protected def isSymbolAnnotationEntry(i: Int): Boolean = {
+    val tag = bytes(index(i)).toInt
+    tag == SYMANNOT
+  }
+
+  /** Does the entry represent children of a symbol? */
+  protected def isChildrenEntry(i: Int): Boolean = {
+    val tag = bytes(index(i)).toInt
+    tag == CHILDREN
+  }
+
+  /** Does entry represent a refinement symbol?
+   *  pre: Entry is a class symbol.
+   *  Recognized by the pickled class name being `<refinement>`.
+   */
+  protected def isRefinementSymbolEntry(i: Int)(implicit ctx: Context): Boolean = {
+    val savedIndex = readIndex
+    readIndex = index(i)
+    val tag = readByte().toInt
+    assert(tag == CLASSsym)
+
+    readNat(); // read length
+    val result = readNameRef() == tpnme.REFINE_CLASS
+    readIndex = savedIndex
+    result
+  }
+
+  /** Is `sym` a refinement class, i.e. is it named `<refinement>`? */
+  protected def isRefinementClass(sym: Symbol)(implicit ctx: Context): Boolean =
+    sym.name == tpnme.REFINE_CLASS
+
+  /** Is `sym` local to this unpickle, i.e. is its top-level class one of the roots? */
+  protected def isLocal(sym: Symbol)(implicit ctx: Context) = isUnpickleRoot(sym.topLevelClass)
+
+  /** Is `sym` one of the three root denotations this unpickler fills in? */
+  protected def isUnpickleRoot(sym: Symbol)(implicit ctx: Context) = {
+    val d = sym.denot
+    d == moduleRoot || d == moduleClassRoot || d == classRoot
+  }
+
+  /** If entry at `i` is undefined, define it by performing operation `op`
+   *  with `readIndex` at the start of the i'th entry. Restore `readIndex`
+   *  afterwards. The result is memoized in `entries(i)`, so `op` runs at
+   *  most once per entry.
+   */
+  protected def at[T <: AnyRef](i: Int, op: () => T): T = {
+    var r = entries(i)
+    if (r eq null) {
+      r = atReadPos(index(i), op)
+      // `op` must not have defined the entry itself in the meantime
+      assert(entries(i) eq null, entries(i))
+      entries(i) = r
+    }
+    r.asInstanceOf[T]
+  }
+
+  /** Run `op` with `readIndex` set to `start`; always restore the previous
+   *  read position afterwards, even if `op` throws.
+   */
+  protected def atReadPos[T](start: Int, op: () => T): T = {
+    val savedIndex = readIndex
+    readIndex = start
+    try op()
+    finally readIndex = savedIndex
+  }
+
+  /** Read a name entry: the tag selects term vs type name, and the
+   *  following `len` bytes hold the name's characters.
+   */
+  protected def readName()(implicit ctx: Context): Name = {
+    val tag = readByte()
+    val len = readNat()
+    tag match {
+      case TERMname => termName(bytes, readIndex, len)
+      case TYPEname => typeName(bytes, readIndex, len)
+      case _ => errorBadSignature("bad name tag: " + tag)
+    }
+  }
+  protected def readTermName()(implicit ctx: Context): TermName = readName().toTermName
+  protected def readTypeName()(implicit ctx: Context): TypeName = readName().toTypeName
+
+ /** Read a symbol */
+ protected def readSymbol()(implicit ctx: Context): Symbol = readDisambiguatedSymbol(alwaysTrue)()
+
+ /** Read a symbol, with possible disambiguation */
+ protected def readDisambiguatedSymbol(p: Symbol => Boolean)()(implicit ctx: Context): Symbol = {
+ val start = indexCoord(readIndex)
+ val tag = readByte()
+ val end = readNat() + readIndex
+ def atEnd = readIndex == end
+
+ def readExtSymbol(): Symbol = {
+ val name = readNameRef()
+ val owner = if (atEnd) loadingMirror.RootClass else readSymbolRef()
+
+ def adjust(denot: Denotation) = {
+ val denot1 = denot.disambiguate(d => p(d.symbol))
+ val sym = denot1.symbol
+ if (denot.exists && !denot1.exists) { // !!!DEBUG
+ val alts = denot.alternatives map (d => d + ":" + d.info + "/" + d.signature)
+ System.err.println(s"!!! disambiguation failure: $alts")
+ val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => d + ":" + d.info + "/" + d.signature)
+ System.err.println(s"!!! all members: $members")
+ }
+ if (tag == EXTref) sym else sym.moduleClass
+ }
+
+ def fromName(name: Name): Symbol = name.toTermName match {
+ case nme.ROOT => loadingMirror.RootClass
+ case nme.ROOTPKG => loadingMirror.RootPackage
+ case _ =>
+ def declIn(owner: Symbol) = adjust(owner.info.decl(name))
+ val sym = declIn(owner)
+ if (sym.exists || owner.ne(defn.ObjectClass)) sym else declIn(defn.AnyClass)
+ }
+
+ def slowSearch(name: Name): Symbol =
+ owner.info.decls.find(_.name == name).getOrElse(NoSymbol)
+
+ def nestedObjectSymbol: Symbol = {
+ // If the owner is overloaded (i.e. a method), it's not possible to select the
+ // right member, so return NoSymbol. This can only happen when unpickling a tree.
+ // the "case Apply" in readTree() takes care of selecting the correct alternative
+ // after parsing the arguments.
+ //if (owner.isOverloaded)
+ // return NoSymbol
+
+ if (tag == EXTMODCLASSref) {
+ val module = owner.info.decl(name.toTermName).suchThat(_ is Module)
+ module.info // force it, as completer does not yet point to module class.
+ module.symbol.moduleClass
+
+ /* was:
+ val moduleVar = owner.info.decl(name.toTermName.moduleVarName).symbol
+ if (moduleVar.isLazyAccessor)
+ return moduleVar.lazyAccessor.lazyAccessor
+ */
+ } else NoSymbol
+ }
+
+ // println(s"read ext symbol $name from ${owner.denot.debugString} in ${classRoot.debugString}") // !!! DEBUG
+
+ // (1) Try name.
+ fromName(name) orElse {
+ // (2) Try with expanded name. Can happen if references to private
+ // symbols are read from outside: for instance when checking the children
+ // of a class. See #1722.
+ fromName(name.toTermName.expandedName(owner)) orElse {
+ // (3) Try as a nested object symbol.
+ nestedObjectSymbol orElse {
+ // (4) Call the mirror's "missing" hook.
+ adjust(ctx.base.missingHook(owner, name)) orElse {
+ // println(owner.info.decls.toList.map(_.debugString).mkString("\n ")) // !!! DEBUG
+ // }
+ // (5) Create a stub symbol to defer hard failure a little longer.
+ System.err.println(i"***** missing reference, looking for $name in $owner")
+ System.err.println(i"decls = ${owner.info.decls}")
+ owner.info.decls.checkConsistent()
+ if (slowSearch(name).exists)
+ System.err.println(i"**** slow search found: ${slowSearch(name)}")
+ if (ctx.debug) Thread.dumpStack()
+ ctx.newStubSymbol(owner, name, source)
+ }
+ }
+ }
+ }
+ }
+
+ tag match {
+ case NONEsym => return NoSymbol
+ case EXTref | EXTMODCLASSref => return readExtSymbol()
+ case _ =>
+ }
+
+ // symbols that were pickled with Pickler.writeSymInfo
+ val nameref = readNat()
+ val name0 = at(nameref, readName)
+ val owner = readSymbolRef()
+
+ var flags = unpickleScalaFlags(readLongNat(), name0.isTypeName)
+ if (flags is DefaultParameter) {
+ // DefaultParameterized flag now on method, not parameter
+ //assert(flags is Param, s"$name0 in $owner")
+ flags = flags &~ DefaultParameterized
+ owner.setFlag(DefaultParameterized)
+ }
+
+ val name1 = name0.adjustIfModuleClass(flags)
+ val name = if (name1 == nme.TRAIT_CONSTRUCTOR) nme.CONSTRUCTOR else name1
+
+ def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner) && !(flags is ModuleClass)
+ def isModuleClassRoot = (name == moduleClassRoot.name) && (owner == moduleClassRoot.owner) && (flags is Module)
+ def isModuleRoot = (name == moduleClassRoot.name.sourceModuleName) && (owner == moduleClassRoot.owner) && (flags is Module)
+
+ //if (isClassRoot) println(s"classRoot of $classRoot found at $readIndex, flags = $flags") // !!! DEBUG
+ //if (isModuleRoot) println(s"moduleRoot of $moduleRoot found at $readIndex, flags = $flags") // !!! DEBUG
+ //if (isModuleClassRoot) println(s"moduleClassRoot of $moduleClassRoot found at $readIndex, flags = $flags") // !!! DEBUG
+
+ def completeRoot(denot: ClassDenotation, completer: LazyType): Symbol = {
+ denot.setFlag(flags)
+ denot.resetFlag(Touched) // allow one more completion
+ denot.info = completer
+ denot.symbol
+ }
+
+ def finishSym(sym: Symbol): Symbol = {
+ if (sym.isClass) sym.setFlag(Scala2x)
+ val owner = sym.owner
+ if (owner.isClass &&
+ !( isUnpickleRoot(sym)
+ || (sym is Scala2Existential)
+ || isRefinementClass(sym)
+ )
+ )
+ owner.asClass.enter(sym, symScope(owner))
+ else if (isRefinementClass(owner))
+ symScope(owner).openForMutations.enter(sym)
+ sym
+ }
+
+ finishSym(tag match {
+ case TYPEsym | ALIASsym =>
+ var name1 = name.asTypeName
+ var flags1 = flags
+ if (flags is TypeParam) {
+ name1 = name1.expandedName(owner)
+ flags1 |= owner.typeParamCreationFlags | ExpandedName
+ }
+ ctx.newSymbol(owner, name1, flags1, localMemberUnpickler, coord = start)
+ case CLASSsym =>
+ var infoRef = readNat()
+ if (isSymbolRef(infoRef)) infoRef = readNat()
+ if (isClassRoot)
+ completeRoot(
+ classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol, infoRef))
+ else if (isModuleClassRoot)
+ completeRoot(
+ moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef))
+ else if (name == tpnme.REFINE_CLASS)
+ // create a type alias instead
+ ctx.newSymbol(owner, name, flags, localMemberUnpickler, coord = start)
+ else {
+ def completer(cls: Symbol) = {
+ val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls)
+ if (flags is ModuleClass)
+ unpickler withSourceModule (implicit ctx =>
+ cls.owner.info.decls.lookup(cls.name.sourceModuleName)
+ .suchThat(_ is Module).symbol)
+ else unpickler
+ }
+ ctx.newClassSymbol(owner, name.asTypeName, flags, completer, coord = start)
+ }
+ case VALsym =>
+ ctx.newSymbol(owner, name.asTermName, flags, localMemberUnpickler, coord = start)
+ case MODULEsym =>
+ if (isModuleRoot) {
+ moduleRoot setFlag flags
+ moduleRoot.symbol
+ } else ctx.newSymbol(owner, name.asTermName, flags,
+ new LocalUnpickler() withModuleClass(implicit ctx =>
+ owner.info.decls.lookup(name.moduleClassName)
+ .suchThat(_ is Module).symbol)
+ , coord = start)
+ case _ =>
+ errorBadSignature("bad symbol tag: " + tag)
+ })
+ }
+
+ class LocalUnpickler extends LazyType {
+ def startCoord(denot: SymDenotation): Coord = denot.symbol.coord
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = try {
+ def parseToCompletion(denot: SymDenotation)(implicit ctx: Context) = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ def atEnd = readIndex == end
+ val unusedNameref = readNat()
+ val unusedOwnerref = readNat()
+ val unusedFlags = readLongNat()
+ var inforef = readNat()
+ denot.privateWithin =
+ if (!isSymbolRef(inforef)) NoSymbol
+ else {
+ val pw = at(inforef, readSymbol)
+ inforef = readNat()
+ pw
+ }
+ // println("reading type for " + denot) // !!! DEBUG
+ val tp = at(inforef, readType)
+ denot match {
+ case denot: ClassDenotation =>
+ val selfInfo = if (atEnd) NoType else readTypeRef()
+ setClassInfo(denot, tp, selfInfo)
+ case denot =>
+ val tp1 = translateTempPoly(tp)
+ denot.info =
+ if (tag == ALIASsym) TypeAlias(tp1)
+ else if (denot.isType) checkNonCyclic(denot.symbol, tp1, reportErrors = false)
+ // we need the checkNonCyclic call to insert LazyRefs for F-bounded cycles
+ else if (!denot.is(Param)) tp1.underlyingIfRepeated(isJava = false)
+ else tp1
+ if (denot.isConstructor) addConstructorTypeParams(denot)
+ if (atEnd) {
+ assert(!(denot is SuperAccessor), denot)
+ } else {
+ assert(denot is (SuperAccessor | ParamAccessor), denot)
+ def disambiguate(alt: Symbol) = { // !!! DEBUG
+ ctx.debugTraceIndented(s"disambiguating ${denot.info} =:= ${denot.owner.thisType.memberInfo(alt)} ${denot.owner}") {
+ denot.info matches denot.owner.thisType.memberInfo(alt)
+ }
+ }
+ val alias = readDisambiguatedSymbolRef(disambiguate).asTerm
+ denot.addAnnotation(Annotation.makeAlias(alias))
+ }
+ }
+ // println(s"unpickled ${denot.debugString}, info = ${denot.info}") !!! DEBUG
+ }
+ atReadPos(startCoord(denot).toIndex,
+ () => parseToCompletion(denot)(
+ ctx.addMode(Mode.Scala2Unpickling).withPhaseNoLater(ctx.picklerPhase)))
+ } catch {
+ case ex: RuntimeException => handleRuntimeException(ex)
+ }
+ }
+
+ object localMemberUnpickler extends LocalUnpickler
+
+ class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter {
+ private def readTypeParams()(implicit ctx: Context): List[TypeSymbol] = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ if (tag == POLYtpe) {
+ val unusedRestpeRef = readNat()
+ until(end, readSymbolRef).asInstanceOf[List[TypeSymbol]]
+ } else Nil
+ }
+ private def loadTypeParams(implicit ctx: Context) =
+ atReadPos(index(infoRef), readTypeParams)
+
+ /** Force reading type params early, we need them in setClassInfo of subclasses. */
+ def init()(implicit ctx: Context) = loadTypeParams
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] =
+ loadTypeParams
+ }
+
+ def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int) =
+ (new ClassUnpickler(infoRef) with SymbolLoaders.SecondCompleter {
+ override def startCoord(denot: SymDenotation): Coord = start
+ }) withDecls symScope(cls) withSourceModule (_ => module)
+
+ /** Convert
+ * tp { type name = sym } forSome { sym >: L <: H }
+ * to
+ * tp { name >: L <: H }
+ * and
+ * tp { name: sym } forSome { sym <: T with Singleton }
+ * to
+ * tp { name: T }
+ */
+ def elimExistentials(boundSyms: List[Symbol], tp: Type)(implicit ctx: Context): Type = {
+ // Need to be careful not to run into cyclic references here (observed when
+  // compiling t247.scala). That's why we avoid taking `symbol` of a TypeRef
+ // unless names match up.
+ val isBound = (tp: Type) => {
+ def refersTo(tp: Type, sym: Symbol): Boolean = tp match {
+ case tp @ TypeRef(_, name) => sym.name == name && sym == tp.symbol
+ case tp: TypeVar => refersTo(tp.underlying, sym)
+ case tp : LazyRef => refersTo(tp.ref, sym)
+ case _ => false
+ }
+ boundSyms.exists(refersTo(tp, _))
+ }
+ // Cannot use standard `existsPart` method because it calls `lookupRefined`
+ // which can cause CyclicReference errors.
+ val isBoundAccumulator = new ExistsAccumulator(isBound) {
+ override def foldOver(x: Boolean, tp: Type): Boolean = tp match {
+ case tp: TypeRef => applyToPrefix(x, tp)
+ case _ => super.foldOver(x, tp)
+ }
+ }
+ def removeSingleton(tp: Type): Type =
+ if (tp isRef defn.SingletonClass) defn.AnyType else tp
+ def elim(tp: Type): Type = tp match {
+ case tp @ RefinedType(parent, name, rinfo) =>
+ val parent1 = elim(tp.parent)
+ rinfo match {
+ case TypeAlias(info: TypeRef) if isBound(info) =>
+ RefinedType(parent1, name, info.symbol.info)
+ case info: TypeRef if isBound(info) =>
+ val info1 = info.symbol.info
+ assert(info1.derivesFrom(defn.SingletonClass))
+ RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _))
+ case info =>
+ tp.derivedRefinedType(parent1, name, info)
+ }
+ case tp @ HKApply(tycon, args) =>
+ val tycon1 = tycon.safeDealias
+ def mapArg(arg: Type) = arg match {
+ case arg: TypeRef if isBound(arg) => arg.symbol.info
+ case _ => arg
+ }
+ if (tycon1 ne tycon) elim(tycon1.appliedTo(args))
+ else tp.derivedAppliedType(tycon, args.map(mapArg))
+ case _ =>
+ tp
+ }
+ val tp1 = elim(tp)
+ if (isBoundAccumulator(false, tp1)) {
+ val anyTypes = boundSyms map (_ => defn.AnyType)
+ val boundBounds = boundSyms map (_.info.bounds.hi)
+ val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes)
+ ctx.warning(s"""failure to eliminate existential
+ |original type : $tp forSome {${ctx.dclsText(boundSyms, "; ").show}
+ |reduces to : $tp1
+ |type used instead: $tp2
+ |proceed at own risk.""".stripMargin)
+ tp2
+ } else tp1
+ }
+
+ /** Read a type
+ *
+ * @param forceProperType is used to ease the transition to NullaryMethodTypes (commentmarker: NMT_TRANSITION)
+ * the flag says that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe))
+ * (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor)
+ */
+ protected def readType()(implicit ctx: Context): Type = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ (tag: @switch) match {
+ case NOtpe =>
+ NoType
+ case NOPREFIXtpe =>
+ NoPrefix
+ case THIStpe =>
+ readSymbolRef().thisType
+ case SINGLEtpe =>
+ val pre = readTypeRef()
+ val sym = readDisambiguatedSymbolRef(_.info.isParameterless)
+ if (isLocal(sym) || (pre == NoPrefix)) pre select sym
+ else TermRef.withSig(pre, sym.name.asTermName, Signature.NotAMethod) // !!! should become redundant
+ case SUPERtpe =>
+ val thistpe = readTypeRef()
+ val supertpe = readTypeRef()
+ SuperType(thistpe, supertpe)
+ case CONSTANTtpe =>
+ ConstantType(readConstantRef())
+ case TYPEREFtpe =>
+ var pre = readTypeRef()
+ val sym = readSymbolRef()
+ pre match {
+ case thispre: ThisType =>
+ // The problem is that class references super.C get pickled as
+ // this.C. Dereferencing the member might then get an overriding class
+ // instance. The problem arises for instance for LinkedHashMap#MapValues
+ // and also for the inner Transform class in all views. We fix it by
+ // replacing the this with the appropriate super.
+ if (sym.owner != thispre.cls) {
+ val overriding = thispre.cls.info.decls.lookup(sym.name)
+ if (overriding.exists && overriding != sym) {
+ val base = pre.baseTypeWithArgs(sym.owner)
+ assert(base.exists)
+ pre = SuperType(thispre, base)
+ }
+ }
+ case _ =>
+ }
+ val tycon =
+ if (sym.isClass && sym.is(Scala2x) && !sym.owner.is(Package))
+ // used fixed sym for Scala 2 inner classes, because they might be shadowed
+ TypeRef.withFixedSym(pre, sym.name.asTypeName, sym.asType)
+ else if (isLocal(sym) || pre == NoPrefix) {
+ val pre1 = if ((pre eq NoPrefix) && (sym is TypeParam)) sym.owner.thisType else pre
+ pre1 select sym
+ }
+ else TypeRef(pre, sym.name.asTypeName)
+ val args = until(end, readTypeRef)
+ if (sym == defn.ByNameParamClass2x) ExprType(args.head)
+ else if (args.nonEmpty) tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args))
+ else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams)
+ else tycon
+ case TYPEBOUNDStpe =>
+ TypeBounds(readTypeRef(), readTypeRef())
+ case REFINEDtpe =>
+ val clazz = readSymbolRef()
+ val decls = symScope(clazz)
+ symScopes(clazz) = EmptyScope // prevent further additions
+ val parents = until(end, readTypeRef)
+ val parent = parents.reduceLeft(AndType(_, _))
+ if (decls.isEmpty) parent
+ else {
+ def subst(info: Type, rt: RecType) =
+ if (clazz.isClass) info.substThis(clazz.asClass, RecThis(rt))
+ else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
+ def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info)
+ val refined = (parent /: decls.toList)(addRefinement)
+ RecType.closeOver(rt => subst(refined, rt))
+ }
+ case CLASSINFOtpe =>
+ val clazz = readSymbolRef()
+ TempClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
+ case METHODtpe | IMPLICITMETHODtpe =>
+ val restpe = readTypeRef()
+ val params = until(end, readSymbolRef)
+ def isImplicit =
+ tag == IMPLICITMETHODtpe ||
+ params.nonEmpty && (params.head is Implicit)
+ val maker = if (isImplicit) ImplicitMethodType else MethodType
+ maker.fromSymbols(params, restpe)
+ case POLYtpe =>
+ val restpe = readTypeRef()
+ val typeParams = until(end, readSymbolRef)
+ if (typeParams.nonEmpty) TempPolyType(typeParams.asInstanceOf[List[TypeSymbol]], restpe.widenExpr)
+ else ExprType(restpe)
+ case EXISTENTIALtpe =>
+ val restpe = readTypeRef()
+ val boundSyms = until(end, readSymbolRef)
+ elimExistentials(boundSyms, restpe)
+ case ANNOTATEDtpe =>
+ AnnotatedType.make(readTypeRef(), until(end, readAnnotationRef))
+ case _ =>
+ noSuchTypeTag(tag, end)
+ }
+ }
+
+ def readTypeParams()(implicit ctx: Context): List[Symbol] = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ if (tag == POLYtpe) {
+ val unusedRestperef = readNat()
+ until(end, readSymbolRef)
+ } else Nil
+ }
+
+ def noSuchTypeTag(tag: Int, end: Int)(implicit ctx: Context): Type =
+ errorBadSignature("bad type tag: " + tag)
+
+  /** Read a constant. Numeric literals are stored as `len`-byte longs and
+   *  narrowed to the target type; strings, classes and enum values are
+   *  references into the pickle.
+   */
+  protected def readConstant()(implicit ctx: Context): Constant = {
+    val tag = readByte().toInt
+    val len = readNat()
+    (tag: @switch) match {
+      case LITERALunit => Constant(())
+      case LITERALboolean => Constant(readLong(len) != 0L)
+      case LITERALbyte => Constant(readLong(len).toByte)
+      case LITERALshort => Constant(readLong(len).toShort)
+      case LITERALchar => Constant(readLong(len).toChar)
+      case LITERALint => Constant(readLong(len).toInt)
+      case LITERALlong => Constant(readLong(len))
+      case LITERALfloat => Constant(intBitsToFloat(readLong(len).toInt))
+      case LITERALdouble => Constant(longBitsToDouble(readLong(len)))
+      case LITERALstring => Constant(readNameRef().toString)
+      case LITERALnull => Constant(null)
+      case LITERALclass => Constant(readTypeRef())
+      case LITERALenum => Constant(readSymbolRef())
+      case _ => noSuchConstantTag(tag, len)
+    }
+  }
+
+  /** Report an unexpected constant tag as a bad signature. */
+  def noSuchConstantTag(tag: Int, len: Int)(implicit ctx: Context): Constant =
+    errorBadSignature("bad constant tag: " + tag)
+
+  /** Read children and store them into the corresponding symbol:
+   *  each child symbol reference becomes a child annotation on `target`.
+   */
+  protected def readChildren()(implicit ctx: Context): Unit = {
+    val tag = readByte()
+    assert(tag == CHILDREN)
+    val end = readNat() + readIndex
+    val target = readSymbolRef()
+    while (readIndex != end)
+      target.addAnnotation(Annotation.makeChild(readSymbolRef()))
+  }
+
+  /* Read a reference to a pickled item */
+  protected def readSymbolRef()(implicit ctx: Context): Symbol = { //OPT inlined from: at(readNat(), readSymbol) to save on closure creation
+    val i = readNat()
+    var r = entries(i)
+    if (r eq null) {
+      val savedIndex = readIndex
+      readIndex = index(i)
+      r = readSymbol()
+      // reading the symbol must not have defined the entry already
+      assert(entries(i) eq null, entries(i))
+      entries(i) = r
+      readIndex = savedIndex
+    }
+    r.asInstanceOf[Symbol]
+  }
+
+  /** Like `readSymbolRef`, but pick among overloaded alternatives with `p`. */
+  protected def readDisambiguatedSymbolRef(p: Symbol => Boolean)(implicit ctx: Context): Symbol =
+    at(readNat(), readDisambiguatedSymbol(p))
+
+  // Convenience readers: each reads an entry number and resolves it via `at`,
+  // which memoizes the result per entry.
+  protected def readNameRef()(implicit ctx: Context): Name = at(readNat(), readName)
+  protected def readTypeRef()(implicit ctx: Context): Type = at(readNat(), () => readType()) // after the NMT_TRANSITION period, we can leave off the () => ... ()
+  protected def readConstantRef()(implicit ctx: Context): Constant = at(readNat(), readConstant)
+
+  protected def readTypeNameRef()(implicit ctx: Context): TypeName = readNameRef().toTypeName
+  protected def readTermNameRef()(implicit ctx: Context): TermName = readNameRef().toTermName
+
+  protected def readAnnotationRef()(implicit ctx: Context): Annotation = at(readNat(), readAnnotation)
+
+  protected def readModifiersRef(isType: Boolean)(implicit ctx: Context): Modifiers = at(readNat(), () => readModifiers(isType))
+  protected def readTreeRef()(implicit ctx: Context): Tree = at(readNat(), readTree)
+
+  /** Read an annotation argument, which is pickled either
+   *  as a Constant or a Tree. Dispatches on the tag byte of entry `i`.
+   */
+  protected def readAnnotArg(i: Int)(implicit ctx: Context): Tree = bytes(index(i)) match {
+    case TREE => at(i, readTree)
+    case _ => Literal(at(i, readConstant))
+  }
+
+  /** Read a ClassfileAnnotArg (argument to a classfile annotation)
+   */
+  private def readArrayAnnotArg()(implicit ctx: Context): Tree = {
+    readByte() // skip the `annotargarray` tag
+    val end = readNat() + readIndex
+    // array elements are trees representing instances of scala.annotation.Annotation
+    SeqLiteral(
+      until(end, () => readClassfileAnnotArg(readNat())),
+      TypeTree(defn.AnnotationType))
+  }
+
+  // Reads a nested annotation-instance argument (an annotation used as an
+  // argument to another annotation).
+  private def readAnnotInfoArg()(implicit ctx: Context): Tree = {
+    readByte() // skip the `annotinfo` tag
+    val end = readNat() + readIndex
+    readAnnotationContents(end)
+  }
+
+  // Dispatch on the pickle tag of entry `i`: nested annotation, array of
+  // classfile annotation args, or a plain constant/tree argument.
+  protected def readClassfileAnnotArg(i: Int)(implicit ctx: Context): Tree = bytes(index(i)) match {
+    case ANNOTINFO => at(i, readAnnotInfoArg)
+    case ANNOTARGARRAY => at(i, readArrayAnnotArg)
+    case _ => readAnnotArg(i)
+  }
+
+  /** Read an annotation's contents. Not to be called directly, use
+   *  readAnnotation, readSymbolAnnotation, or readAnnotInfoArg.
+   *  Arguments are consumed up to `end`; a name entry introduces a NamedArg.
+   */
+  protected def readAnnotationContents(end: Int)(implicit ctx: Context): Tree = {
+    val atp = readTypeRef()
+    val args = {
+      val t = new ListBuffer[Tree]
+
+      while (readIndex != end) {
+        val argref = readNat()
+        t += {
+          if (isNameEntry(argref)) {
+            // name entry => named argument `name = arg`
+            val name = at(argref, readName)
+            val arg = readClassfileAnnotArg(readNat())
+            NamedArg(name.asTermName, arg)
+          } else readAnnotArg(argref)
+        }
+      }
+      t.toList
+    }
+    // println(atp)
+    val targs = atp.argTypes
+
+    // Resolve the constructor overload for `new atp(args)` with explicit targs.
+    tpd.applyOverloaded(tpd.New(atp withoutArgs targs), nme.CONSTRUCTOR, args, targs, atp)
+}
+
+  /** Read an annotation and as a side effect store it into
+   *  the symbol it requests. Called at top-level, for all
+   *  (symbol, annotInfo) entries.
+   */
+  protected def readSymbolAnnotation()(implicit ctx: Context): Unit = {
+    val tag = readByte()
+    if (tag != SYMANNOT)
+      errorBadSignature("symbol annotation expected (" + tag + ")")
+    val end = readNat() + readIndex
+    val target = readSymbolRef()
+    // attach lazily: contents are only unpickled when the annotation is forced
+    target.addAnnotation(deferredAnnot(end))
+  }
+
+  /** Read an annotation and return it. Used when unpickling
+   *  an ANNOTATED(WSELF)tpe or a NestedAnnotArg
+   */
+  protected def readAnnotation()(implicit ctx: Context): Annotation = {
+    val tag = readByte()
+    if (tag != ANNOTINFO)
+      errorBadSignature("annotation expected (" + tag + ")")
+    val end = readNat() + readIndex
+    deferredAnnot(end)
+  }
+
+  /** A deferred annotation that can be completed by reading
+   *  the bytes between `readIndex` and `end`.
+   *  The completion re-reads from `start` in the phase of the unpickling
+   *  context, so forcing later is safe.
+   */
+  protected def deferredAnnot(end: Int)(implicit ctx: Context): Annotation = {
+    val start = readIndex
+    val atp = readTypeRef()
+    Annotation.deferred(
+      atp.typeSymbol, implicit ctx1 =>
+        atReadPos(start, () => readAnnotationContents(end)(ctx1.withPhase(ctx.phase))))
+  }
+
+  /* Read an abstract syntax tree from the Scala 2 pickle stream.
+   * Expects a TREE entry: outer tag, length, inner tag, then (unless
+   * EMPTYtree) a type ref. The inner tag selects the tree shape; symbol,
+   * modifiers and name are read eagerly by the set* helpers below where the
+   * format requires them, even when the resulting dotty tree does not use
+   * them all (reads must happen to keep the stream position correct).
+   */
+  protected def readTree()(implicit ctx: Context): Tree = {
+    val outerTag = readByte()
+    if (outerTag != TREE)
+      errorBadSignature("tree expected (" + outerTag + ")")
+    val end = readNat() + readIndex
+    val tag = readByte()
+    val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+
+    // Set by the three functions to follow. If symbol is non-null
+    // after the new tree 't' has been created, t has its Symbol
+    // set to symbol; and it always has its Type set to tpe.
+    var symbol: Symbol = null
+    var mods: Modifiers = null
+    var name: Name = null
+
+    /** Read a Symbol, Modifiers, and a Name */
+    def setSymModsName(): Unit = {
+      symbol = readSymbolRef()
+      mods = readModifiersRef(symbol.isType)
+      name = readNameRef()
+    }
+    /** Read a Symbol and a Name */
+    def setSymName(): Unit = {
+      symbol = readSymbolRef()
+      name = readNameRef()
+    }
+    /** Read a Symbol */
+    def setSym(): Unit = {
+      symbol = readSymbolRef()
+    }
+
+    // Scala 2 pickles carry no positions; all trees produced here are unpositioned.
+    implicit val pos: Position = NoPosition
+
+    tag match {
+      case EMPTYtree =>
+        EmptyTree
+
+      case PACKAGEtree =>
+        setSym()
+        val pid = readTreeRef().asInstanceOf[RefTree]
+        val stats = until(end, readTreeRef)
+        PackageDef(pid, stats)
+
+      case CLASStree =>
+        setSymModsName()
+        val impl = readTemplateRef()
+        val tparams = until(end, readTypeDefRef)
+        val cls = symbol.asClass
+        // split the primary constructor out of the template body
+        val ((constr: DefDef) :: Nil, stats) =
+          impl.body.partition(_.symbol == cls.primaryConstructor)
+        ClassDef(cls, constr, tparams ++ stats)
+
+      case MODULEtree =>
+        setSymModsName()
+        ModuleDef(symbol.asTerm, readTemplateRef().body)
+
+      case VALDEFtree =>
+        setSymModsName()
+        val tpt = readTreeRef()  // read (and discard) the pickled tpt; dotty rebuilds it from the symbol
+        val rhs = readTreeRef()
+        ValDef(symbol.asTerm, rhs)
+
+      case DEFDEFtree =>
+        setSymModsName()
+        // tparams/vparamss/tpt must be consumed to advance the stream even
+        // though dotty's DefDef factory reconstitutes them from the symbol.
+        val tparams = times(readNat(), readTypeDefRef)
+        val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
+        val tpt = readTreeRef()
+        val rhs = readTreeRef()
+        DefDef(symbol.asTerm, rhs)
+
+      case TYPEDEFtree =>
+        setSymModsName()
+        val rhs = readTreeRef()
+        val tparams = until(end, readTypeDefRef)
+        TypeDef(symbol.asType)
+
+      case LABELtree =>
+        setSymName()
+        val rhs = readTreeRef()
+        val params = until(end, readIdentRef)
+        // labels become local defs; non-case labels are invoked immediately
+        val ldef = DefDef(symbol.asTerm, rhs)
+        def isCaseLabel(sym: Symbol) = sym.name.startsWith(nme.CASEkw)
+        if (isCaseLabel(symbol)) ldef
+        else Block(ldef :: Nil, Apply(Ident(symbol.termRef), Nil))
+
+      case IMPORTtree =>
+        setSym()
+        val expr = readTreeRef()
+        val selectors = until(end, () => {
+          val fromName = readNameRef()
+          val toName = readNameRef()
+          val from = untpd.Ident(fromName)
+          val to = untpd.Ident(toName)
+          if (toName.isEmpty) from else untpd.Thicket(from, untpd.Ident(toName))
+        })
+
+        Import(expr, selectors)
+
+      case TEMPLATEtree =>
+        setSym()
+        val parents = times(readNat(), readTreeRef)
+        val self = readValDefRef()
+        val body = until(end, readTreeRef)
+        untpd.Template(???, parents, self, body) // !!! TODO: pull out primary constructor
+          .withType(symbol.namedType)
+
+      case BLOCKtree =>
+        val expr = readTreeRef()
+        val stats = until(end, readTreeRef)
+        Block(stats, expr)
+
+      case CASEtree =>
+        val pat = readTreeRef()
+        val guard = readTreeRef()
+        val body = readTreeRef()
+        CaseDef(pat, guard, body)
+
+      case ALTERNATIVEtree =>
+        Alternative(until(end, readTreeRef))
+
+      case STARtree =>
+        readTreeRef()
+        unimplementedTree("STAR")
+
+      case BINDtree =>
+        setSymName()
+        Bind(symbol.asTerm, readTreeRef())
+
+      case UNAPPLYtree =>
+        val fun = readTreeRef()
+        val args = until(end, readTreeRef)
+        UnApply(fun, Nil, args, defn.AnyType) // !!! this is wrong in general
+
+      case ARRAYVALUEtree =>
+        val elemtpt = readTreeRef()
+        val trees = until(end, readTreeRef)
+        SeqLiteral(trees, elemtpt)
+          // note can't deal with trees passed to Java methods as arrays here
+
+      case FUNCTIONtree =>
+        setSym()
+        val body = readTreeRef()
+        val vparams = until(end, readValDefRef)
+        // Scala 2 anonymous functions become closures over a fresh apply method.
+        val applyType = MethodType(vparams map (_.name), vparams map (_.tpt.tpe), body.tpe)
+        val applyMeth = ctx.newSymbol(symbol.owner, nme.apply, Method, applyType)
+        Closure(applyMeth, Function.const(body.changeOwner(symbol, applyMeth)) _)
+
+      case ASSIGNtree =>
+        val lhs = readTreeRef()
+        val rhs = readTreeRef()
+        Assign(lhs, rhs)
+
+      case IFtree =>
+        val cond = readTreeRef()
+        val thenp = readTreeRef()
+        val elsep = readTreeRef()
+        If(cond, thenp, elsep)
+
+      case MATCHtree =>
+        val selector = readTreeRef()
+        val cases = until(end, readCaseDefRef)
+        Match(selector, cases)
+
+      case RETURNtree =>
+        setSym()
+        Return(readTreeRef(), Ident(symbol.termRef))
+
+      case TREtree =>
+        val block = readTreeRef()
+        val finalizer = readTreeRef()
+        val catches = until(end, readCaseDefRef)
+        Try(block, catches, finalizer)
+
+      case THROWtree =>
+        Throw(readTreeRef())
+
+      case NEWtree =>
+        New(readTreeRef().tpe)
+
+      case TYPEDtree =>
+        val expr = readTreeRef()
+        val tpt = readTreeRef()
+        Typed(expr, tpt)
+
+      case TYPEAPPLYtree =>
+        val fun = readTreeRef()
+        val args = until(end, readTreeRef)
+        TypeApply(fun, args)
+
+      case APPLYtree =>
+        val fun = readTreeRef()
+        val args = until(end, readTreeRef)
+        /*
+        if (fun.symbol.isOverloaded) {
+          fun.setType(fun.symbol.info)
+          inferMethodAlternative(fun, args map (_.tpe), tpe)
+        }
+*/
+        Apply(fun, args) // note: can't deal with overloaded syms yet
+
+      case APPLYDYNAMICtree =>
+        setSym()
+        val qual = readTreeRef()
+        val args = until(end, readTreeRef)
+        unimplementedTree("APPLYDYNAMIC")
+
+      case SUPERtree =>
+        setSym()
+        val qual = readTreeRef()
+        val mix = readTypeNameRef()
+        Super(qual, mix, inConstrCall = false) // todo: revise
+
+      case THIStree =>
+        setSym()
+        val name = readTypeNameRef()
+        This(symbol.asClass)
+
+      case SELECTtree =>
+        setSym()
+        val qualifier = readTreeRef()
+        val selector = readNameRef()
+        qualifier.select(symbol.namedType)
+      case IDENTtree =>
+        setSymName()
+        Ident(symbol.namedType)
+
+      case LITERALtree =>
+        Literal(readConstantRef())
+
+      case TYPEtree =>
+        TypeTree(tpe)
+
+      case ANNOTATEDtree =>
+        val annot = readTreeRef()
+        val arg = readTreeRef()
+        Annotated(arg, annot)
+
+      case SINGLETONTYPEtree =>
+        SingletonTypeTree(readTreeRef())
+
+      case SELECTFROMTYPEtree =>
+        val qualifier = readTreeRef()
+        val selector = readTypeNameRef()
+        Select(qualifier, symbol.namedType)
+
+      case COMPOUNDTYPEtree =>
+        readTemplateRef()
+        TypeTree(tpe)
+
+      case APPLIEDTYPEtree =>
+        val tpt = readTreeRef()
+        val args = until(end, readTreeRef)
+        AppliedTypeTree(tpt, args)
+
+      case TYPEBOUNDStree =>
+        val lo = readTreeRef()
+        val hi = readTreeRef()
+        TypeBoundsTree(lo, hi)
+
+      case EXISTENTIALTYPEtree =>
+        // existential clauses are consumed but dropped; the pickled type stands in
+        val tpt = readTreeRef()
+        val whereClauses = until(end, readTreeRef)
+        TypeTree(tpe)
+
+      case _ =>
+        noSuchTreeTag(tag, end)
+    }
+  }
+
+  // Error exits for unknown or unsupported tree tags.
+  def noSuchTreeTag(tag: Int, end: Int)(implicit ctx: Context) =
+    errorBadSignature("unknown tree type (" + tag + ")")
+
+  def unimplementedTree(what: String)(implicit ctx: Context) =
+    errorBadSignature(s"cannot read $what trees from Scala 2.x signatures")
+
+  /** Read a MODIFIERS entry: 64-bit pickled flags (split into two Nats),
+   *  a privateWithin name ref, and no annotations (those come separately).
+   */
+  def readModifiers(isType: Boolean)(implicit ctx: Context): Modifiers = {
+    val tag = readNat()
+    if (tag != MODIFIERS)
+      errorBadSignature("expected a modifiers tag (" + tag + ")")
+    val end = readNat() + readIndex
+    val pflagsHi = readNat()
+    val pflagsLo = readNat()
+    val pflags = (pflagsHi.toLong << 32) + pflagsLo
+    val flags = unpickleScalaFlags(pflags, isType)
+    val privateWithin = readNameRef().asTypeName
+    Modifiers(flags, privateWithin, Nil)
+  }
+
+  // --- Typed tree-ref readers: read a tree ref and require a specific shape,
+  // reporting a bad-signature error otherwise.
+  protected def readTemplateRef()(implicit ctx: Context): Template =
+    readTreeRef() match {
+      case templ: Template => templ
+      case other =>
+        errorBadSignature("expected a template (" + other + ")")
+    }
+  protected def readCaseDefRef()(implicit ctx: Context): CaseDef =
+    readTreeRef() match {
+      case tree: CaseDef => tree
+      case other =>
+        errorBadSignature("expected a case def (" + other + ")")
+    }
+  protected def readValDefRef()(implicit ctx: Context): ValDef =
+    readTreeRef() match {
+      case tree: ValDef => tree
+      case other =>
+        errorBadSignature("expected a ValDef (" + other + ")")
+    }
+  protected def readIdentRef()(implicit ctx: Context): Ident =
+    readTreeRef() match {
+      case tree: Ident => tree
+      case other =>
+        errorBadSignature("expected an Ident (" + other + ")")
+    }
+  protected def readTypeDefRef()(implicit ctx: Context): TypeDef =
+    readTreeRef() match {
+      case tree: TypeDef => tree
+      case other =>
+        errorBadSignature("expected an TypeDef (" + other + ")")
+    }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
new file mode 100644
index 000000000..b84e2eb47
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
@@ -0,0 +1,132 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.reflect.internal.Chars._
+
+/** A character-level reader over `buf` used by the scanners.
+ *  Tracks the current char, offsets, and line starts, and transparently
+ *  decodes `\\uXXXX` unicode escapes and collapses CR;LF pairs.
+ */
+abstract class CharArrayReader { self =>
+
+  val buf: Array[Char]
+  protected def startFrom = 0
+
+  /** Switch whether unicode should be decoded */
+  protected def decodeUni: Boolean = true
+
+  /** An error routine to call on bad unicode escapes \\uxxxx. */
+  protected def error(msg: String, offset: Int): Unit
+
+  /** the last read character */
+  var ch: Char = _
+
+  /** The offset one past the last read character */
+  var charOffset: Int = startFrom
+
+  /** The offset before the last read character */
+  var lastCharOffset: Int = startFrom
+
+  /** The start offset of the current line */
+  var lineStartOffset: Int = startFrom
+
+  /** The start offset of the line before the current one */
+  var lastLineStartOffset: Int = startFrom
+
+  // offset just past the last decoded \uXXXX escape, or -1 if none
+  private var lastUnicodeOffset = -1
+
+  /** Is last character a unicode escape \\uxxxx? */
+  def isUnicodeEscape = charOffset == lastUnicodeOffset
+
+  /** Advance one character; reducing CR;LF pairs to just LF */
+  final def nextChar(): Unit = {
+    val idx = charOffset
+    lastCharOffset = idx
+    if (idx >= buf.length) {
+      ch = SU  // SU (end-of-input sentinel) once past the buffer
+    } else {
+      val c = buf(idx)
+      ch = c
+      charOffset = idx + 1
+      if (c == '\\') potentialUnicode()
+      else if (c < ' ') { skipCR(); potentialLineEnd() }
+    }
+  }
+
+  def getc() = { nextChar() ; ch }
+
+  /** Advance one character, leaving CR;LF pairs intact.
+   *  This is for use in multi-line strings, so there are no
+   *  "potential line ends" here.
+   */
+  final def nextRawChar(): Unit = {
+    val idx = charOffset
+    lastCharOffset = idx
+    if (idx >= buf.length) {
+      ch = SU
+    } else {
+      // NOTE(review): reads buf(charOffset) where nextChar uses buf(idx);
+      // equivalent here since charOffset == idx at this point.
+      val c = buf(charOffset)
+      ch = c
+      charOffset = idx + 1
+      if (c == '\\') potentialUnicode()
+    }
+  }
+
+  /** Interpret \\uxxxx escapes */
+  private def potentialUnicode(): Unit = {
+    // a \u escape only counts if preceded by an even number of backslashes
+    def evenSlashPrefix: Boolean = {
+      var p = charOffset - 2
+      while (p >= 0 && buf(p) == '\\') p -= 1
+      (charOffset - p) % 2 == 0
+    }
+    // read one hex digit, reporting an error (and yielding SU/-1) on failure
+    def udigit: Int = {
+      if (charOffset >= buf.length) {
+        // Since the positioning code is very insistent about throwing exceptions,
+        // we have to decrement the position so our error message can be seen, since
+        // we are one past EOF. This happens with e.g. val x = \ u 1 <EOF>
+        error("incomplete unicode escape", charOffset - 1)
+        SU
+      }
+      else {
+        val d = digit2int(buf(charOffset), 16)
+        if (d >= 0) charOffset += 1
+        else error("error in unicode escape", charOffset)
+        d
+      }
+    }
+    if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) {
+      // multiple 'u's are legal per the spec: \uu0041 decodes like \u0041
+      do charOffset += 1
+      while (charOffset < buf.length && buf(charOffset) == 'u')
+      val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
+      lastUnicodeOffset = charOffset
+      ch = code.toChar
+    }
+  }
+
+  /** replace CR;LF by LF */
+  private def skipCR(): Unit = {
+    if (ch == CR)
+      if (charOffset < buf.length && buf(charOffset) == LF) {
+        charOffset += 1
+        ch = LF
+      }
+  }
+
+  /** Handle line ends */
+  private def potentialLineEnd(): Unit = {
+    if (ch == LF || ch == FF) {
+      lastLineStartOffset = lineStartOffset
+      lineStartOffset = charOffset
+    }
+  }
+
+  def isAtEnd = charOffset >= buf.length
+
+  /** A new reader that takes off at the current character position */
+  def lookaheadReader = new CharArrayLookaheadReader
+
+  // Shares buf with the outer reader but advances independently, so lookahead
+  // never disturbs the outer reader's position. Errors delegate outward.
+  class CharArrayLookaheadReader extends CharArrayReader {
+    val buf = self.buf
+    charOffset = self.charOffset
+    ch = self.ch
+    override def decodeUni = self.decodeUni
+    def error(msg: String, offset: Int) = self.error(msg, offset)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
new file mode 100644
index 000000000..0f63b25bb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -0,0 +1,898 @@
+package dotty.tools
+package dotc
+package parsing
+
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Flags
+import dotty.tools.dotc.core.Flags.FlagSet
+
+import scala.language.implicitConversions
+
+import JavaTokens._
+import JavaScanners._
+import Scanners.Offset
+import Parsers._
+import core._
+import Contexts._
+import Names._
+import NameOps._
+import Types._
+import Symbols._
+import ast.Trees._
+import Decorators._
+import StdNames._
+import dotty.tools.dotc.reporting.diagnostic.messages.IdentifierExpected
+import dotty.tools.dotc.util.SourceFile
+import util.Positions._
+import annotation.switch
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.Collections._
+
+object JavaParsers {
+
+ import ast.untpd._
+
+ class JavaParser(source: SourceFile)(implicit ctx: Context) extends ParserCommon(source) {
+
+ val definitions = ctx.definitions
+ import definitions._
+
+ val in: JavaScanner = new JavaScanner(source)
+
+ /** The simple name of the package of the currently parsed file */
+ private var thisPackageName: TypeName = tpnme.EMPTY
+
+    /** This is the general parse entry point.
+     *  Overridden by ScriptParser
+     *  Parses a full compilation unit and requires EOF afterwards.
+     */
+    def parse(): Tree = {
+      val t = compilationUnit()
+      accept(EOF)
+      t
+    }
+
+ // -------- error handling ---------------------------------------
+
+    /** Error recovery: skip tokens until a statement boundary — a top-level
+     *  semicolon, an unmatched closing brace, or EOF — tracking paren/brace
+     *  nesting so separators inside groups are ignored.
+     */
+    protected def skip(): Unit = {
+      var nparens = 0
+      var nbraces = 0
+      while (true) {
+        in.token match {
+          case EOF =>
+            return
+          case SEMI =>
+            if (nparens == 0 && nbraces == 0) return
+          case RPAREN =>
+            nparens -= 1
+          case RBRACE =>
+            if (nbraces == 0) return
+            nbraces -= 1
+          case LPAREN =>
+            nparens += 1
+          case LBRACE =>
+            nbraces += 1
+          case _ =>
+        }
+        in.nextToken()
+      }
+    }
+
+    /** Report a syntax error at the current token offset. */
+    def syntaxError(msg: String, skipIt: Boolean): Unit = {
+      syntaxError(in.offset, msg, skipIt)
+    }
+
+    /** Report a syntax error at `pos`, suppressing duplicate reports on the
+     *  same token, and optionally skip ahead to recover.
+     */
+    def syntaxError(pos: Int, msg: String, skipIt: Boolean): Unit = {
+      if (pos > lastErrorOffset) {
+        syntaxError(msg, pos)
+        // no more errors on this token.
+        lastErrorOffset = in.offset
+      }
+      if (skipIt)
+        skip()
+    }
+    // placeholder tree carrying ErrorType, positioned at the current token
+    def errorTypeTree = TypeTree().withType(ErrorType) withPos Position(in.offset)
+
+ // --------- tree building -----------------------------
+
+    // --- Tree-building helpers for common references and synthetic members.
+    def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
+
+    def javaDot(name: Name): Tree =
+      Select(rootDot(nme.java), name)
+
+    def javaLangDot(name: Name): Tree =
+      Select(javaDot(nme.lang), name)
+
+    def javaLangObject(): Tree = javaLangDot(tpnme.Object)
+
+    def arrayOf(tpt: Tree) =
+      AppliedTypeTree(Ident(nme.Array.toTypeName), List(tpt))
+
+    // Java method bodies are not parsed; this stands in for them.
+    def unimplementedExpr = Ident("???".toTermName)
+
+    /** Build a Template with the first constructor pulled out of `stats`
+     *  (or a synthesized one if none is present).
+     */
+    def makeTemplate(parents: List[Tree], stats: List[Tree], tparams: List[TypeDef], needsDummyConstr: Boolean) = {
+      def pullOutFirstConstr(stats: List[Tree]): (Tree, List[Tree]) = stats match {
+        case (meth: DefDef) :: rest if meth.name == CONSTRUCTOR => (meth, rest)
+        case first :: rest =>
+          val (constr, tail) = pullOutFirstConstr(rest)
+          (constr, first :: tail)
+        case nil => (EmptyTree, nil)
+      }
+      var (constr1, stats1) = pullOutFirstConstr(stats)
+      if (constr1 == EmptyTree) constr1 = makeConstructor(List(), tparams)
+      // A dummy first constructor is needed for Java classes so that the real constructors see the
+      // import of the companion object. The constructor has parameter of type Unit so no Java code
+      // can call it.
+      if (needsDummyConstr) {
+        stats1 = constr1 :: stats1
+        constr1 = makeConstructor(List(scalaDot(tpnme.Unit)), tparams, Flags.JavaDefined | Flags.PrivateLocal)
+      }
+      Template(constr1.asInstanceOf[DefDef], parents, EmptyValDef, stats1)
+    }
+
+    // Synthetic constructor parameters: x$1, x$2, ...
+    def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
+      makeParam(nme.syntheticParamName(count), tpt)
+    def makeParam(name: TermName, tpt: Tree): ValDef =
+      ValDef(name, tpt, EmptyTree).withMods(Modifiers(Flags.JavaDefined | Flags.ParamAccessor))
+
+    /** Synthesize a constructor taking `formals`, with synthetic param names. */
+    def makeConstructor(formals: List[Tree], tparams: List[TypeDef], flags: FlagSet = Flags.JavaDefined) = {
+      val vparams = mapWithIndex(formals)((p, i) => makeSyntheticParam(i + 1, p))
+      DefDef(nme.CONSTRUCTOR, tparams, List(vparams), TypeTree(), EmptyTree).withMods(Modifiers(flags))
+    }
+
+ // ------------- general parsing ---------------------------
+
+    /** skip parent or brace enclosed sequence of things
+     *  (advances past a balanced (...) or {...} group starting at the
+     *  current token; stops when nesting returns to zero or at EOF).
+     */
+    def skipAhead(): Unit = {
+      var nparens = 0
+      var nbraces = 0
+      do {
+        in.token match {
+          case LPAREN =>
+            nparens += 1
+          case LBRACE =>
+            nbraces += 1
+          case _ =>
+        }
+        in.nextToken()
+        in.token match {
+          case RPAREN =>
+            nparens -= 1
+          case RBRACE =>
+            nbraces -= 1
+          case _ =>
+        }
+      } while (in.token != EOF && (nparens > 0 || nbraces > 0))
+    }
+
+    /** Skip forward until one of `tokens` (or EOF), stepping over whole
+     *  brace/paren groups as units.
+     */
+    def skipTo(tokens: Int*): Unit = {
+      while (!(tokens contains in.token) && in.token != EOF) {
+        if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
+        else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
+        else in.nextToken()
+      }
+    }
+
+    /** Consume one token of the specified type, or
+     *  signal an error if it is not there.
+     *
+     *  @return The offset at the start of the token to accept
+     */
+    def accept(token: Int): Int = {
+      val offset = in.offset
+      if (in.token != token) {
+        val offsetToReport = in.offset
+        val msg =
+          tokenString(token) + " expected but " +
+            tokenString(in.token) + " found."
+
+        syntaxError(offsetToReport, msg, skipIt = true)
+      }
+      if (in.token == token) in.nextToken()
+      offset
+    }
+
+    /** Consume one `>` worth of an angle-bracket closer. Compound tokens like
+     *  `>>` are rewritten in place to the remaining shorter token, so nested
+     *  generics (`List<List<A>>`) close one level at a time.
+     */
+    def acceptClosingAngle(): Unit = {
+      val closers: PartialFunction[Int, Int] = {
+        case GTGTGTEQ => GTGTEQ
+        case GTGTGT => GTGT
+        case GTGTEQ => GTEQ
+        case GTGT => GT
+        case GTEQ => EQUALS
+      }
+      if (closers isDefinedAt in.token) in.token = closers(in.token)
+      else accept(GT)
+    }
+
+    def identForType(): TypeName = ident().toTypeName
+    /** Consume and return an identifier, or report an error and yield
+     *  nme.ERROR if the current token is not one.
+     */
+    def ident(): Name =
+      if (in.token == IDENTIFIER) {
+        val name = in.name
+        in.nextToken()
+        name
+      } else {
+        accept(IDENTIFIER)
+        nme.ERROR
+      }
+
+    /** Parse one or more `p`s separated by token `sep` (at least one). */
+    def repsep[T <: Tree](p: () => T, sep: Int): List[T] = {
+      val buf = ListBuffer[T](p())
+      while (in.token == sep) {
+        in.nextToken()
+        buf += p()
+      }
+      buf.toList
+    }
+
+    /** Convert (qual)ident to type identifier
+     */
+    def convertToTypeId(tree: Tree): Tree = convertToTypeName(tree) match {
+      case Some(t) => t withPos tree.pos
+      case _ => tree match {
+        case AppliedTypeTree(_, _) | Select(_, _) =>
+          tree
+        case _ =>
+          syntaxError(IdentifierExpected(tree.show), tree.pos)
+          errorTypeTree
+      }
+    }
+
+    /** Translate names in Select/Ident nodes to type names.
+     */
+    def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
+      case Select(qual, name) => Some(Select(qual, name.toTypeName))
+      case Ident(name) => Some(Ident(name.toTypeName))
+      case _ => None
+    }
+ // -------------------- specific parsing routines ------------------
+
+    /** Parse a dot-separated qualified identifier as nested Selects. */
+    def qualId(): RefTree = {
+      var t: RefTree = atPos(in.offset) { Ident(ident()) }
+      while (in.token == DOT) {
+        in.nextToken()
+        t = atPos(t.pos.start, in.offset) { Select(t, ident()) }
+      }
+      t
+    }
+
+    /** Wrap `tpt` in one Array layer per trailing `[]` pair. */
+    def optArrayBrackets(tpt: Tree): Tree =
+      if (in.token == LBRACKET) {
+        val tpt1 = atPos(tpt.pos.start, in.offset) { arrayOf(tpt) }
+        in.nextToken()
+        accept(RBRACKET)
+        optArrayBrackets(tpt1)
+      } else tpt
+
+    /** Parse a Java primitive type keyword into the corresponding Scala type. */
+    def basicType(): Tree =
+      atPos(in.offset) {
+        in.token match {
+          case BYTE => in.nextToken(); TypeTree(ByteType)
+          case SHORT => in.nextToken(); TypeTree(ShortType)
+          case CHAR => in.nextToken(); TypeTree(CharType)
+          case INT => in.nextToken(); TypeTree(IntType)
+          case LONG => in.nextToken(); TypeTree(LongType)
+          case FLOAT => in.nextToken(); TypeTree(FloatType)
+          case DOUBLE => in.nextToken(); TypeTree(DoubleType)
+          case BOOLEAN => in.nextToken(); TypeTree(BooleanType)
+          case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
+        }
+      }
+
+    /** Parse a Java type: primitive, or (possibly generic, dotted) reference
+     *  type, with any trailing `[]` array brackets applied.
+     */
+    def typ(): Tree =
+      optArrayBrackets {
+        if (in.token == FINAL) in.nextToken()
+        if (in.token == IDENTIFIER) {
+          var t = typeArgs(atPos(in.offset)(Ident(ident())))
+          // typeSelect generates Select nodes if the lhs is an Ident or Select,
+          // For other nodes it always assumes that the selected item is a type.
+          def typeSelect(t: Tree, name: Name) = t match {
+            case Ident(_) | Select(_, _) => Select(t, name)
+            case _ => Select(t, name.toTypeName)
+          }
+          while (in.token == DOT) {
+            in.nextToken()
+            t = typeArgs(atPos(t.pos.start, in.offset)(typeSelect(t, ident())))
+          }
+          convertToTypeId(t)
+        } else {
+          basicType()
+        }
+      }
+
+    /** Parse an optional `<...>` type-argument list applied to `t`.
+     *  Wildcards (`? extends X`, `? super Y`) become TypeBoundsTree.
+     */
+    def typeArgs(t: Tree): Tree = {
+      var wildnum = 0
+      def typeArg(): Tree =
+        if (in.token == QMARK) {
+          val offset = in.offset
+          in.nextToken()
+          val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree
+          val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree
+          atPos(offset) {
+            /*
+            TypeDef(
+              Modifiers(Flags.JavaDefined | Flags.Deferred),
+              typeName("_$" +(wildnum += 1)),
+              List(),
+              TypeBoundsTree(lo, hi))
+            */
+            TypeBoundsTree(lo, hi)
+          }
+        } else {
+          typ()
+        }
+      if (in.token == LT) {
+        in.nextToken()
+        val t1 = convertToTypeId(t)
+        val args = repsep(typeArg, COMMA)
+        acceptClosingAngle()
+        atPos(t1.pos.start) {
+          AppliedTypeTree(t1, args)
+        }
+      } else t
+    }
+
+    /** Parse (and discard) a run of `@Annotation`s; always returns Nil. */
+    def annotations(): List[Tree] = {
+      //var annots = new ListBuffer[Tree]
+      while (in.token == AT) {
+        in.nextToken()
+        annotation()
+      }
+      List() // don't pass on annotations for now
+    }
+
+    /** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
+     *  The argument list is skipped, not parsed.
+     */
+    def annotation(): Unit = {
+      qualId()
+      if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
+      else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
+    }
+
+    /** Parse a (possibly empty) run of Java modifiers and annotations into
+     *  Scala Modifiers. Package-private access maps to privateWithin = the
+     *  enclosing package name; native/transient/volatile become annotations.
+     *  Returns from inside the loop at the first non-modifier token.
+     */
+    def modifiers(inInterface: Boolean): Modifiers = {
+      var flags = Flags.JavaDefined
+      // assumed true unless we see public/private/protected
+      var isPackageAccess = true
+      var annots: List[Tree] = Nil
+      def addAnnot(sym: ClassSymbol) =
+        annots :+= atPos(in.offset) {
+          in.nextToken()
+          New(TypeTree(sym.typeRef))
+        }
+
+      while (true) {
+        in.token match {
+          case AT if (in.lookaheadToken != INTERFACE) =>
+            // `@interface` starts an annotation-type declaration, not an annotation
+            in.nextToken()
+            annotation()
+          case PUBLIC =>
+            isPackageAccess = false
+            in.nextToken()
+          case PROTECTED =>
+            flags |= Flags.Protected
+            in.nextToken()
+          case PRIVATE =>
+            isPackageAccess = false
+            flags |= Flags.Private
+            in.nextToken()
+          case STATIC =>
+            flags |= Flags.JavaStatic
+            in.nextToken()
+          case ABSTRACT =>
+            flags |= Flags.Abstract
+            in.nextToken()
+          case FINAL =>
+            flags |= Flags.Final
+            in.nextToken()
+          case DEFAULT =>
+            flags |= Flags.DefaultMethod
+            in.nextToken()
+          case NATIVE =>
+            addAnnot(NativeAnnot)
+          case TRANSIENT =>
+            addAnnot(TransientAnnot)
+          case VOLATILE =>
+            addAnnot(VolatileAnnot)
+          case SYNCHRONIZED | STRICTFP =>
+            // no Scala equivalent; dropped
+            in.nextToken()
+          case _ =>
+            val privateWithin: TypeName =
+              if (isPackageAccess && !inInterface) thisPackageName
+              else tpnme.EMPTY
+
+            return Modifiers(flags, privateWithin) withAnnotations annots
+        }
+      }
+      assert(false, "should not be here")
+      throw new RuntimeException
+    }
+
+    /** Parse an optional `<T, U extends B, ...>` type-parameter list. */
+    def typeParams(flags: FlagSet = Flags.JavaDefined | Flags.PrivateLocal | Flags.Param): List[TypeDef] =
+      if (in.token == LT) {
+        in.nextToken()
+        val tparams = repsep(() => typeParam(flags), COMMA)
+        acceptClosingAngle()
+        tparams
+      } else List()
+
+    /** Parse one type parameter; an `extends` clause becomes the upper bound. */
+    def typeParam(flags: FlagSet): TypeDef =
+      atPos(in.offset) {
+        val name = identForType()
+        val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
+        TypeDef(name, TypeBoundsTree(EmptyTree, hi)).withMods(Modifiers(flags))
+      }
+
+    /** Parse an intersection bound `T & U & ...` as nested AndTypeTrees. */
+    def bound(): Tree =
+      atPos(in.offset) {
+        val buf = ListBuffer[Tree](typ())
+        while (in.token == AMP) {
+          in.nextToken()
+          buf += typ()
+        }
+        val ts = buf.toList
+        if (ts.tail.isEmpty) ts.head
+        else ts.reduce(AndTypeTree(_,_))
+      }
+
+    /** Parse a parenthesized, comma-separated formal parameter list. */
+    def formalParams(): List[ValDef] = {
+      accept(LPAREN)
+      val vparams = if (in.token == RPAREN) List() else repsep(formalParam, COMMA)
+      accept(RPAREN)
+      vparams
+    }
+
+    /** Parse one formal parameter; a varargs `T...` becomes a `T*` repeated type. */
+    def formalParam(): ValDef = {
+      val start = in.offset
+      if (in.token == FINAL) in.nextToken()
+      annotations()
+      var t = typ()
+      if (in.token == DOTDOTDOT) {
+        in.nextToken()
+        t = atPos(t.pos.start) {
+          PostfixOp(t, nme.raw.STAR)
+        }
+      }
+      atPos(start, in.offset) {
+        varDecl(Modifiers(Flags.JavaDefined | Flags.Param), t, ident().toTermName)
+      }
+    }
+
+    /** Parse (and discard) an optional `throws` clause. */
+    def optThrows(): Unit = {
+      if (in.token == THROWS) {
+        in.nextToken()
+        repsep(typ, COMMA)
+      }
+    }
+
+    /** Skip a method body (not parsed) and stand in `???` for it. */
+    def methodBody(): Tree = atPos(in.offset) {
+      skipAhead()
+      accept(RBRACE) // skip block
+      unimplementedExpr
+    }
+
+    // true for `interface` and `@interface` (annotation type) declarations
+    def definesInterface(token: Int) = token == INTERFACE || token == AT
+
+    /** Parse a term member: constructor, method, or field declaration(s).
+     *  A constructor is recognized when `(` follows a bare identifier return
+     *  type (the class name) outside an interface.
+     */
+    def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = {
+      val inInterface = definesInterface(parentToken)
+      val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List()
+      val isVoid = in.token == VOID
+      var rtpt =
+        if (isVoid)
+          atPos(in.offset) {
+            in.nextToken()
+            TypeTree(UnitType)
+          }
+        else typ()
+      var nameOffset = in.offset
+      val rtptName = rtpt match {
+        case Ident(name) => name
+        case _ => nme.EMPTY
+      }
+      if (in.token == LPAREN && rtptName != nme.EMPTY && !inInterface) {
+        // constructor declaration
+        val vparams = formalParams()
+        optThrows()
+        List {
+          atPos(start) {
+            DefDef(nme.CONSTRUCTOR, parentTParams,
+              List(vparams), TypeTree(), methodBody()).withMods(mods)
+          }
+        }
+      } else {
+        var mods1 = mods
+        if (mods is Flags.Abstract) mods1 = mods &~ Flags.Abstract
+        nameOffset = in.offset
+        val name = ident()
+        if (in.token == LPAREN) {
+          // method declaration
+          val vparams = formalParams()
+          if (!isVoid) rtpt = optArrayBrackets(rtpt)
+          optThrows()
+          val bodyOk = !inInterface || (mods is Flags.DefaultMethod)
+          val body =
+            if (bodyOk && in.token == LBRACE) {
+              methodBody()
+            } else {
+              if (parentToken == AT && in.token == DEFAULT) {
+                // annotation member with a default value: record it via
+                // the AnnotationDefault attribute annotation, skip the value
+                val annot =
+                  atPos(nameOffset) {
+                    New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
+                  }
+                mods1 = mods1 withAddedAnnotation annot
+                val unimplemented = unimplementedExpr
+                skipTo(SEMI)
+                accept(SEMI)
+                unimplemented
+              } else {
+                accept(SEMI)
+                EmptyTree
+              }
+            }
+          //if (inInterface) mods1 |= Flags.Deferred
+          List {
+            atPos(start, nameOffset) {
+              DefDef(name.toTermName, tparams, List(vparams), rtpt, body).withMods(mods1 | Flags.Method)
+            }
+          }
+        } else {
+          if (inInterface) mods1 |= Flags.Final | Flags.JavaStatic
+          val result = fieldDecls(start, nameOffset, mods1, rtpt, name)
+          accept(SEMI)
+          result
+        }
+      }
+    }
+
+    /** Parse a sequence of field declarations, separated by commas.
+     *  This one is tricky because a comma might also appear in an
+     *  initializer. Since we don't parse initializers we don't know
+     *  what the comma signifies.
+     *  We solve this with a second list buffer `maybe` which contains
+     *  potential variable definitions.
+     *  Once we have reached the end of the statement, we know whether
+     *  these potential definitions are real or not.
+     */
+    def fieldDecls(start: Offset, firstNameOffset: Offset, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
+      val buf = ListBuffer[Tree](
+        atPos(start, firstNameOffset) { varDecl(mods, tpt, name.toTermName) })
+      val maybe = new ListBuffer[Tree] // potential variable definitions.
+      while (in.token == COMMA) {
+        in.nextToken()
+        if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
+          val nextNameOffset = in.offset
+          val name = ident()
+          if (in.token == EQUALS || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
+            buf ++= maybe
+            buf += atPos(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) }
+            maybe.clear()
+          } else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
+            maybe += atPos(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) }
+          } else { // ... if there's something else we were still in the initializer of the
+                   // previous var def; skip to next comma or semicolon.
+            skipTo(COMMA, SEMI)
+            maybe.clear()
+          }
+        } else { // ... if there's no ident following the comma we were still in the initializer of the
+                 // previous var def; skip to next comma or semicolon.
+          skipTo(COMMA, SEMI)
+          maybe.clear()
+        }
+      }
+      if (in.token == SEMI) {
+        buf ++= maybe // every potential vardef that survived until here is real.
+      }
+      buf.toList
+    }
+
+    /** Build a ValDef for one variable: trailing `[]` applied, initializer
+     *  skipped (stand-in `???` used), non-final fields made Mutable.
+     */
+    def varDecl(mods: Modifiers, tpt: Tree, name: TermName): ValDef = {
+      val tpt1 = optArrayBrackets(tpt)
+      if (in.token == EQUALS && !(mods is Flags.Param)) skipTo(COMMA, SEMI)
+      val mods1 = if (mods is Flags.Final) mods else mods | Flags.Mutable
+      ValDef(name, tpt1, if (mods is Flags.Param) EmptyTree else unimplementedExpr).withMods(mods1)
+    }
+
+ def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match {
+ case CLASS | ENUM | INTERFACE | AT =>
+ typeDecl(start, if (definesInterface(parentToken)) mods | Flags.JavaStatic else mods)
+ case _ =>
+ termDecl(start, mods, parentToken, parentTParams)
+ }
+
+ def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree =
+ atPos(cdef.pos) {
+ assert(cdef.pos.exists)
+ ModuleDef(cdef.name.toTermName,
+ makeTemplate(List(), statics, List(), false)).withMods((cdef.mods & (Flags.AccessFlags | Flags.JavaDefined)).toTermFlags)
+ }
+
+ def importCompanionObject(cdef: TypeDef): Tree =
+ Import(Ident(cdef.name.toTermName).withPos(NoPosition), Ident(nme.WILDCARD) :: Nil)
+
+ // Importing the companion object members cannot be done uncritically: see
+ // ticket #2377 wherein a class contains two static inner classes, each of which
+ // has a static inner class called "Builder" - this results in an ambiguity error
+ // when each performs the import in the enclosing class's scope.
+ //
+ // To address this I moved the import Companion._ inside the class, as the first
+ // statement. This should work without compromising the enclosing scope, but may (?)
+ // end up suffering from the same issues it does in scala - specifically that this
+ // leaves auxiliary constructors unable to access members of the companion object
+ // as unqualified identifiers.
+ /** Pair `cdef` with a companion object holding its statics.
+ * When statics exist, an `import Companion._` is prepended to the class body
+ * so static members are visible unqualified (see the discussion above).
+ * Returns companion-first, then the (possibly adjusted) class.
+ */
+ def addCompanionObject(statics: List[Tree], cdef: TypeDef): List[Tree] = {
+ // if there are no statics we can use the original cdef, but we always
+ // create the companion so import A._ is not an error (see ticket #1700)
+ val cdefNew =
+ if (statics.isEmpty) cdef
+ else {
+ val template = cdef.rhs.asInstanceOf[Template]
+ cpy.TypeDef(cdef)(cdef.name,
+ cpy.Template(template)(template.constr, template.parents, template.self,
+ importCompanionObject(cdef) :: template.body)).withMods(cdef.mods)
+ }
+
+ List(makeCompanionObject(cdefNew, statics), cdefNew)
+ }
+
+ /** Parse a Java `import` (or `import static`) statement.
+ * The dotted name is collected into `buf`; a trailing `*` becomes a wildcard selector.
+ * Non-static imports are rooted at `_root_` (nme.ROOTPKG) so absolute Java names resolve.
+ * An import with fewer than two segments is reported as illegal and dropped.
+ */
+ def importDecl(): List[Tree] = {
+ val start = in.offset
+ accept(IMPORT)
+ val buf = new ListBuffer[Name]
+ // Collect dot-separated identifiers; returns the offset of the last segment.
+ def collectIdents() : Int = {
+ if (in.token == ASTERISK) {
+ val starOffset = in.offset
+ in.nextToken()
+ buf += nme.WILDCARD
+ starOffset
+ } else {
+ val nameOffset = in.offset
+ buf += ident()
+ if (in.token == DOT) {
+ in.nextToken()
+ collectIdents()
+ } else nameOffset
+ }
+ }
+ if (in.token == STATIC) in.nextToken()
+ else buf += nme.ROOTPKG
+ val lastnameOffset = collectIdents()
+ accept(SEMI)
+ val names = buf.toList
+ if (names.length < 2) {
+ syntaxError(start, "illegal import", skipIt = false)
+ List()
+ } else {
+ // Fold all but the last segment into a Select chain forming the qualifier.
+ val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
+ val lastname = names.last
+ val ident = Ident(lastname) withPos Position(lastnameOffset)
+// val selector = lastname match {
+// case nme.WILDCARD => Pair(ident, Ident(null) withPos Position(-1))
+// case _ => Pair(ident, ident)
+// }
+ val imp = atPos(start) { Import(qual, List(ident)) }
+ imp :: Nil
+ }
+ }
+
+ /** Parse an optional `implements` clause; empty list when absent. */
+ def interfacesOpt() =
+ if (in.token == IMPLEMENTS) {
+ in.nextToken()
+ repsep(typ, COMMA)
+ } else {
+ List()
+ }
+
+ /** Parse a Java class declaration. The superclass defaults to java.lang.Object
+ * when no `extends` clause is present; statics go to the companion object.
+ */
+ def classDecl(start: Offset, mods: Modifiers): List[Tree] = {
+ accept(CLASS)
+ val nameOffset = in.offset
+ val name = identForType()
+ val tparams = typeParams()
+ val superclass =
+ if (in.token == EXTENDS) {
+ in.nextToken()
+ typ()
+ } else {
+ javaLangObject()
+ }
+ val interfaces = interfacesOpt()
+ val (statics, body) = typeBody(CLASS, name, tparams)
+ val cls = atPos(start, nameOffset) {
+ TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, true)).withMods(mods)
+ }
+ addCompanionObject(statics, cls)
+ }
+
+ /** Parse a Java interface declaration; it is mapped to an abstract Scala trait
+ * flagged JavaInterface, with java.lang.Object as the default parent.
+ */
+ def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = {
+ accept(INTERFACE)
+ val nameOffset = in.offset
+ val name = identForType()
+ val tparams = typeParams()
+ val parents =
+ if (in.token == EXTENDS) {
+ in.nextToken()
+ repsep(typ, COMMA)
+ } else {
+ List(javaLangObject())
+ }
+ val (statics, body) = typeBody(INTERFACE, name, tparams)
+ val iface = atPos(start, nameOffset) {
+ TypeDef(
+ name,
+ makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract)
+ }
+ addCompanionObject(statics, iface)
+ }
+
+ /** Parse a brace-delimited type body; returns (static members, instance members). */
+ def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = {
+ accept(LBRACE)
+ val defs = typeBodyDecls(leadingToken, parentName, parentTParams)
+ accept(RBRACE)
+ defs
+ }
+
+ /** Parse the members of a type body, splitting them into statics and instance
+ * members. Initializer blocks are skipped and assumed static. For interfaces,
+ * non-method members are treated as static, and protected type aliases are
+ * generated in the instance part forwarding to the companion's nested types.
+ */
+ def typeBodyDecls(parentToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = {
+ val inInterface = definesInterface(parentToken)
+ val statics = new ListBuffer[Tree]
+ val members = new ListBuffer[Tree]
+ while (in.token != RBRACE && in.token != EOF) {
+ val start = in.offset
+ var mods = atPos(start) { modifiers(inInterface) }
+ if (in.token == LBRACE) {
+ skipAhead() // skip init block, we just assume we have seen only static
+ accept(RBRACE)
+ } else if (in.token == SEMI) {
+ in.nextToken()
+ } else {
+ if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.JavaStatic
+ val decls = memberDecl(start, mods, parentToken, parentTParams)
+ (if ((mods is Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef])))
+ statics
+ else
+ members) ++= decls
+ }
+ }
+ // For interfaces: alias each static nested type inside the trait itself.
+ def forwarders(sdef: Tree): List[Tree] = sdef match {
+ case TypeDef(name, _) if (parentToken == INTERFACE) =>
+ var rhs: Tree = Select(Ident(parentName.toTermName), name)
+ List(TypeDef(name, rhs).withMods(Modifiers(Flags.Protected)))
+ case _ =>
+ List()
+ }
+ val sdefs = statics.toList
+ val idefs = members.toList ::: (sdefs flatMap forwarders)
+ (sdefs, idefs)
+ }
+ /** Parents given to the Scala counterpart of a Java annotation declaration. */
+ def annotationParents = List(
+ scalaAnnotationDot(tpnme.Annotation),
+ Select(javaLangDot(nme.annotation), tpnme.Annotation),
+ scalaAnnotationDot(tpnme.ClassfileAnnotation)
+ )
+ /** Parse a Java `@interface` declaration. Each annotation element (a DefDef in
+ * the body) becomes a parameter of a synthetic constructor; the elements
+ * themselves are removed from the resulting class body.
+ */
+ def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = {
+ accept(AT)
+ accept(INTERFACE)
+ val nameOffset = in.offset
+ val name = identForType()
+ val (statics, body) = typeBody(AT, name, List())
+ val constructorParams = body.collect {
+ case dd: DefDef => makeParam(dd.name, dd.tpt)
+ }
+ val constr = DefDef(nme.CONSTRUCTOR,
+ List(), List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined))
+ val body1 = body.filterNot(_.isInstanceOf[DefDef])
+ val templ = makeTemplate(annotationParents, constr :: body1, List(), false)
+ val annot = atPos(start, nameOffset) {
+ TypeDef(name, templ).withMods(mods | Flags.Abstract)
+ }
+ addCompanionObject(statics, annot)
+ }
+
+ /** Parse a Java enum declaration. Enum constants, any member declarations after
+ * the `;`, and synthetic `values`/`valueOf` stubs all go to the companion object;
+ * the class itself extends java.lang.Enum (instantiated with dummy arguments).
+ */
+ def enumDecl(start: Offset, mods: Modifiers): List[Tree] = {
+ accept(ENUM)
+ val nameOffset = in.offset
+ val name = identForType()
+ def enumType = Ident(name)
+ val interfaces = interfacesOpt()
+ accept(LBRACE)
+ val buf = new ListBuffer[Tree]
+ // Enum constants form a comma-separated list before the optional `;`.
+ def parseEnumConsts(): Unit = {
+ if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
+ buf += enumConst(enumType)
+ if (in.token == COMMA) {
+ in.nextToken()
+ parseEnumConsts()
+ }
+ }
+ }
+ parseEnumConsts()
+ val consts = buf.toList
+ val (statics, body) =
+ if (in.token == SEMI) {
+ in.nextToken()
+ typeBodyDecls(ENUM, name, List())
+ } else {
+ (List(), List())
+ }
+ // Unimplemented stubs for the compiler-generated values()/valueOf(String) members.
+ val predefs = List(
+ DefDef(
+ nme.values, List(),
+ ListOfNil,
+ arrayOf(enumType),
+ unimplementedExpr).withMods(Modifiers(Flags.JavaDefined | Flags.JavaStatic | Flags.Method)),
+ DefDef(
+ nme.valueOf, List(),
+ List(List(makeParam("x".toTermName, TypeTree(StringType)))),
+ enumType,
+ unimplementedExpr).withMods(Modifiers(Flags.JavaDefined | Flags.JavaStatic | Flags.Method)))
+ accept(RBRACE)
+ /*
+ val superclazz =
+ AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
+ */
+ val superclazz = Apply(TypeApply(
+ Select(New(javaLangDot(tpnme.Enum)), nme.CONSTRUCTOR), List(enumType)),
+ List(Literal(Constant(null)),Literal(Constant(0))))
+ val enum = atPos(start, nameOffset) {
+ TypeDef(name,
+ makeTemplate(superclazz :: interfaces, body, List(), true)).withMods(mods | Flags.Enum)
+ }
+ addCompanionObject(consts ::: statics ::: predefs, enum)
+ }
+
+ /** Parse one enum constant as a stable static ValDef of the enum's type.
+ * Constructor arguments and an optional constant class body are skipped.
+ */
+ def enumConst(enumType: Tree) = {
+ annotations()
+ atPos(in.offset) {
+ val name = ident()
+ if (in.token == LPAREN) {
+ // skip arguments
+ skipAhead()
+ accept(RPAREN)
+ }
+ if (in.token == LBRACE) {
+ // skip classbody
+ skipAhead()
+ accept(RBRACE)
+ }
+ ValDef(name.toTermName, enumType, unimplementedExpr).withMods(Modifiers(Flags.Enum | Flags.Stable | Flags.JavaDefined | Flags.JavaStatic))
+ }
+ }
+
+ /** Dispatch on the current token to the matching type-declaration parser;
+ * anything else is a syntax error yielding an error type tree.
+ */
+ def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match {
+ case ENUM => enumDecl(start, mods)
+ case INTERFACE => interfaceDecl(start, mods)
+ case AT => annotationDecl(start, mods)
+ case CLASS => classDecl(start, mods)
+ case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
+ }
+
+ /** CompilationUnit ::= [package QualId semi] TopStatSeq
+ *
+ * Parses an optional (possibly annotated) package clause, then imports,
+ * then top-level type declarations, producing a single PackageDef.
+ * Declarations without a package go into the empty package.
+ */
+ def compilationUnit(): Tree = {
+ val start = in.offset
+ val pkg: RefTree =
+ if (in.token == AT || in.token == PACKAGE) {
+ annotations()
+ accept(PACKAGE)
+ val pkg = qualId()
+ accept(SEMI)
+ pkg
+ } else {
+ Ident(nme.EMPTY_PACKAGE)
+ }
+ thisPackageName = convertToTypeName(pkg) match {
+ case Some(t) => t.name.toTypeName
+ case _ => tpnme.EMPTY
+ }
+ val buf = new ListBuffer[Tree]
+ while (in.token == IMPORT)
+ buf ++= importDecl()
+ while (in.token != EOF && in.token != RBRACE) {
+ while (in.token == SEMI) in.nextToken()
+ if (in.token != EOF) {
+ val start = in.offset
+ val mods = atPos(start) { modifiers(inInterface = false) }
+ buf ++= typeDecl(start, mods)
+ }
+ }
+ val unit = atPos(start) { PackageDef(pkg, buf.toList) }
+ accept(EOF)
+ unit
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
new file mode 100644
index 000000000..83e16627c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
@@ -0,0 +1,538 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core.Names._, core.Contexts._, core.Decorators._, util.Positions._
+import Scanners._
+import util.SourceFile
+import JavaTokens._
+import scala.annotation.{ switch, tailrec }
+import scala.reflect.internal.Chars._
+
+object JavaScanners {
+
+ class JavaScanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) {
+
+ /** Map a keyword index into kwArray to its token; out-of-range indices are plain identifiers. */
+ def toToken(idx: Int): Token =
+ if (idx >= 0 && idx <= lastKeywordStart) kwArray(idx) else IDENTIFIER
+
+ private class JavaTokenData0 extends TokenData
+
+ /** we need one token lookahead
+ */
+ val next : TokenData = new JavaTokenData0
+ val prev : TokenData = new JavaTokenData0
+
+ // Get next token ------------------------------------------------------------
+
+ /** Advance to the next token, consuming the buffered lookahead token first if present. */
+ def nextToken(): Unit = {
+ if (next.token == EMPTY) {
+ lastOffset = lastCharOffset
+ fetchToken()
+ }
+ else {
+ this copyFrom next
+ next.token = EMPTY
+ }
+ }
+
+ /** Peek at the next token without consuming it: scanner state is saved in
+ * `prev`, the peeked token is stashed in `next`, and state is restored.
+ */
+ def lookaheadToken: Int = {
+ prev copyFrom this
+ nextToken()
+ val t = token
+ next copyFrom this
+ this copyFrom prev
+ t
+ }
+
+ /** read next token
+ *
+ * Scans one Java token starting at the current character: whitespace is
+ * skipped via tail-calls to itself, then a @switch dispatch on the first
+ * character recognizes identifiers, numeric/char/string literals, the
+ * multi-character operators (with their `=` compound forms), comments
+ * (delegated to skipComment), and punctuation. Sets `token`, `offset` and
+ * the literal buffer as side effects.
+ */
+ private def fetchToken(): Unit = {
+ offset = charOffset - 1
+ ch match {
+ case ' ' | '\t' | CR | LF | FF =>
+ nextChar()
+ fetchToken()
+ case _ =>
+ (ch: @switch) match {
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' | '_' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+
+ case '0' =>
+ putChar(ch)
+ nextChar()
+ if (ch == 'x' || ch == 'X') {
+ nextChar()
+ base = 16
+ } else {
+ base = 8
+ }
+ getNumber()
+
+ case '1' | '2' | '3' | '4' |
+ '5' | '6' | '7' | '8' | '9' =>
+ base = 10
+ getNumber()
+
+ // string literal; getlitch handles escapes, bare CR/LF/EOF is an error
+ case '\"' =>
+ nextChar()
+ while (ch != '\"' && (isUnicodeEscape || ch != CR && ch != LF && ch != SU)) {
+ getlitch()
+ }
+ if (ch == '\"') {
+ token = STRINGLIT
+ setStrVal()
+ nextChar()
+ } else {
+ error("unclosed string literal")
+ }
+
+ case '\'' =>
+ nextChar()
+ getlitch()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ error("unclosed character literal")
+ }
+
+ case '=' =>
+ token = EQUALS
+ nextChar()
+ if (ch == '=') {
+ token = EQEQ
+ nextChar()
+ }
+
+ case '>' =>
+ token = GT
+ nextChar()
+ if (ch == '=') {
+ token = GTEQ
+ nextChar()
+ } else if (ch == '>') {
+ token = GTGT
+ nextChar()
+ if (ch == '=') {
+ token = GTGTEQ
+ nextChar()
+ } else if (ch == '>') {
+ token = GTGTGT
+ nextChar()
+ if (ch == '=') {
+ token = GTGTGTEQ
+ nextChar()
+ }
+ }
+ }
+
+ case '<' =>
+ token = LT
+ nextChar()
+ if (ch == '=') {
+ token = LTEQ
+ nextChar()
+ } else if (ch == '<') {
+ token = LTLT
+ nextChar()
+ if (ch == '=') {
+ token = LTLTEQ
+ nextChar()
+ }
+ }
+
+ case '!' =>
+ token = BANG
+ nextChar()
+ if (ch == '=') {
+ token = BANGEQ
+ nextChar()
+ }
+
+ case '~' =>
+ token = TILDE
+ nextChar()
+
+ case '?' =>
+ token = QMARK
+ nextChar()
+
+ case ':' =>
+ token = COLON
+ nextChar()
+
+ case '@' =>
+ token = AT
+ nextChar()
+
+ case '&' =>
+ token = AMP
+ nextChar()
+ if (ch == '&') {
+ token = AMPAMP
+ nextChar()
+ } else if (ch == '=') {
+ token = AMPEQ
+ nextChar()
+ }
+
+ case '|' =>
+ token = BAR
+ nextChar()
+ if (ch == '|') {
+ token = BARBAR
+ nextChar()
+ } else if (ch == '=') {
+ token = BAREQ
+ nextChar()
+ }
+
+ case '+' =>
+ token = PLUS
+ nextChar()
+ if (ch == '+') {
+ token = PLUSPLUS
+ nextChar()
+ } else if (ch == '=') {
+ token = PLUSEQ
+ nextChar()
+ }
+
+ case '-' =>
+ token = MINUS
+ nextChar()
+ if (ch == '-') {
+ token = MINUSMINUS
+ nextChar()
+ } else if (ch == '=') {
+ token = MINUSEQ
+ nextChar()
+ }
+
+ case '*' =>
+ token = ASTERISK
+ nextChar()
+ if (ch == '=') {
+ token = ASTERISKEQ
+ nextChar()
+ }
+
+ case '/' =>
+ nextChar()
+ if (!skipComment()) {
+ token = SLASH
+ nextChar()
+ if (ch == '=') {
+ token = SLASHEQ
+ nextChar()
+ }
+ } else fetchToken()
+
+ case '^' =>
+ token = HAT
+ nextChar()
+ if (ch == '=') {
+ token = HATEQ
+ nextChar()
+ }
+
+ case '%' =>
+ token = PERCENT
+ nextChar()
+ if (ch == '=') {
+ token = PERCENTEQ
+ nextChar()
+ }
+
+ case '.' =>
+ token = DOT
+ nextChar()
+ if ('0' <= ch && ch <= '9') {
+ putChar('.');
+ getFraction()
+ } else if (ch == '.') {
+ nextChar()
+ if (ch == '.') {
+ nextChar()
+ token = DOTDOTDOT
+ } else error("`.' character expected")
+ }
+
+ case ';' =>
+ token = SEMI
+ nextChar()
+
+ case ',' =>
+ token = COMMA
+ nextChar()
+
+ case '(' =>
+ token = LPAREN
+ nextChar()
+
+ case '{' =>
+ token = LBRACE
+ nextChar()
+
+ case ')' =>
+ token = RPAREN
+ nextChar()
+
+ case '}' =>
+ token = RBRACE
+ nextChar()
+
+ case '[' =>
+ token = LBRACKET
+ nextChar()
+
+ case ']' =>
+ token = RBRACKET
+ nextChar()
+
+ // SU marks end of input only when actually at the end; otherwise it is illegal
+ case SU =>
+ if (isAtEnd) token = EOF
+ else {
+ error("illegal character")
+ nextChar()
+ }
+
+ case _ =>
+ if (Character.isUnicodeIdentifierStart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else {
+ error("illegal character: " + ch.toInt)
+ nextChar()
+ }
+ }
+ }
+ }
+
+ /** Called with the character after an initial '/'. Skips a `//` line comment
+ * or a `/* ... */` block comment and returns true; returns false (consuming
+ * nothing further) when the '/' did not start a comment.
+ */
+ protected def skipComment(): Boolean = {
+ @tailrec def skipLineComment(): Unit = ch match {
+ case CR | LF | SU =>
+ case _ => nextChar(); skipLineComment()
+ }
+ @tailrec def skipJavaComment(): Unit = ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => nextChar(); if (ch == '/') nextChar() else skipJavaComment()
+ case _ => nextChar(); skipJavaComment()
+ }
+ ch match {
+ case '/' => nextChar(); skipLineComment(); true
+ case '*' => nextChar(); skipJavaComment(); true
+ case _ => false
+ }
+ }
+
+ // Identifiers ---------------------------------------------------------------
+
+ /** Consume the remaining characters of an identifier whose first character is
+ * already in the buffer, then finish the name. Accepts ASCII letters, digits,
+ * '$', '_' and any Unicode identifier-part character.
+ */
+ private def getIdentRest(): Unit = {
+ while (true) {
+ (ch: @switch) match {
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' |
+ '0' | '1' | '2' | '3' | '4' |
+ '5' | '6' | '7' | '8' | '9' =>
+ putChar(ch)
+ nextChar()
+
+ case '_' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ return
+ case SU =>
+ finishNamed()
+ return
+ case _ =>
+ if (Character.isUnicodeIdentifierPart(ch)) {
+ putChar(ch)
+ nextChar()
+ } else {
+ finishNamed()
+ return
+ }
+ }
+ }
+ }
+
+ // Literals -----------------------------------------------------------------
+
+ /** read next character in character or string literal:
+ * handles backslash escapes — up to three octal digits (leading digit <= '3'
+ * for the three-digit form) and the standard single-character escapes;
+ * unknown escapes are reported but the raw character is still buffered.
+ */
+ protected def getlitch() =
+ if (ch == '\\') {
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ val leadch: Char = ch
+ var oct: Int = digit2int(ch, 8)
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ if (leadch <= '3' && '0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ }
+ }
+ putChar(oct.asInstanceOf[Char])
+ } else {
+ ch match {
+ case 'b' => putChar('\b')
+ case 't' => putChar('\t')
+ case 'n' => putChar('\n')
+ case 'f' => putChar('\f')
+ case 'r' => putChar('\r')
+ case '\"' => putChar('\"')
+ case '\'' => putChar('\'')
+ case '\\' => putChar('\\')
+ case _ =>
+ error("invalid escape character", charOffset - 1)
+ putChar(ch)
+ }
+ nextChar()
+ }
+ } else {
+ putChar(ch)
+ nextChar()
+ }
+
+ /** read fractional part and exponent of floating point number
+ * if one is present.
+ * An exponent is only consumed when lookahead confirms a digit follows the
+ * optional sign. A trailing 'd'/'D' keeps DOUBLELIT; 'f'/'F' switches the
+ * token to FLOATLIT. Finishes by storing the buffered text via setStrVal().
+ */
+ protected def getFraction(): Unit = {
+ token = DOUBLELIT
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ if (ch == 'e' || ch == 'E') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '+' || lookahead.ch == '-') {
+ lookahead.nextChar()
+ }
+ if ('0' <= lookahead.ch && lookahead.ch <= '9') {
+ putChar(ch)
+ nextChar()
+ if (ch == '+' || ch == '-') {
+ putChar(ch)
+ nextChar()
+ }
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ }
+ token = DOUBLELIT
+ }
+ if (ch == 'd' || ch == 'D') {
+ putChar(ch)
+ nextChar()
+ token = DOUBLELIT
+ } else if (ch == 'f' || ch == 'F') {
+ putChar(ch)
+ nextChar()
+ token = FLOATLIT
+ }
+ setStrVal()
+ }
+
+ /** read a number into name and set base
+ * Consumes digits valid for the current base (octal also accepts 8/9 here —
+ * validation happens later), then decides via lookahead whether a following
+ * '.'/'e'/'f'/'d' makes this a floating literal (delegating to getFraction).
+ * A trailing 'l'/'L' yields LONGLIT.
+ */
+ protected def getNumber(): Unit = {
+ while (digit2int(ch, if (base < 10) 10 else base) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ token = INTLIT
+ if (base <= 10 && ch == '.') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ lookahead.ch match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
+ '8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
+ putChar(ch)
+ nextChar()
+ return getFraction()
+ case _ =>
+ if (!isIdentifierStart(lookahead.ch)) {
+ putChar(ch)
+ nextChar()
+ return getFraction()
+ }
+ }
+ }
+ if (base <= 10 &&
+ (ch == 'e' || ch == 'E' ||
+ ch == 'f' || ch == 'F' ||
+ ch == 'd' || ch == 'D')) {
+ return getFraction()
+ }
+ setStrVal()
+ if (ch == 'l' || ch == 'L') {
+ nextChar()
+ token = LONGLIT
+ }
+ }
+
+ // Errors -----------------------------------------------------------------
+
+ /** Debug rendering of the current token: literals and identifiers show their
+ * value, everything else falls back to tokenString.
+ */
+ override def toString() = token match {
+ case IDENTIFIER =>
+ "id(" + name + ")"
+ case CHARLIT =>
+ "char(" + intVal + ")"
+ case INTLIT =>
+ "int(" + intVal + ")"
+ case LONGLIT =>
+ "long(" + intVal + ")"
+ case FLOATLIT =>
+ "float(" + floatVal + ")"
+ case DOUBLELIT =>
+ "double(" + floatVal + ")"
+ case STRINGLIT =>
+ "string(" + name + ")"
+ case SEMI =>
+ ";"
+ case COMMA =>
+ ","
+ case _ =>
+ tokenString(token)
+ }
+
+ /* Initialization: read first char, then first token */
+ nextChar()
+ nextToken()
+ }
+
+ val (lastKeywordStart, kwArray) = buildKeywordArray(keywords)
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
new file mode 100644
index 000000000..9530e0516
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
@@ -0,0 +1,92 @@
+package dotty.tools
+package dotc
+package parsing
+
+import collection.immutable.BitSet
+
+/** Token definitions for the Java parser/scanner: keywords shared with Scala,
+ * Java-only keywords, Java operator tokens (IDs 140-175, no string form
+ * entered), and primitive type keywords. ID ranges must stay consistent with
+ * the tokenRange calls above each group.
+ */
+object JavaTokens extends TokensCommon {
+ final val minToken = EMPTY
+ final val maxToken = DOUBLE
+
+ final val javaOnlyKeywords = tokenRange(INSTANCEOF, ASSERT)
+ final val sharedKeywords = BitSet( IF, FOR, ELSE, THIS, NULL, NEW, SUPER, ABSTRACT, FINAL, PRIVATE, PROTECTED,
+ OVERRIDE, EXTENDS, TRUE, FALSE, CLASS, IMPORT, PACKAGE, DO, THROW, TRY, CATCH, FINALLY, WHILE, RETURN )
+ final val primTypes = tokenRange(VOID, DOUBLE)
+ final val keywords = sharedKeywords | javaOnlyKeywords | primTypes
+
+ /** keywords */
+ final val INSTANCEOF = 101; enter(INSTANCEOF, "instanceof")
+ final val CONST = 102; enter(CONST, "const")
+
+ /** templates */
+ final val INTERFACE = 105; enter(INTERFACE, "interface")
+ final val ENUM = 106; enter(ENUM, "enum")
+ final val IMPLEMENTS = 107; enter(IMPLEMENTS, "implements")
+
+ /** modifiers */
+ final val PUBLIC = 110; enter(PUBLIC, "public")
+ final val DEFAULT = 111; enter(DEFAULT, "default")
+ final val STATIC = 112; enter(STATIC, "static")
+ final val TRANSIENT = 113; enter(TRANSIENT, "transient")
+ final val VOLATILE = 114; enter(VOLATILE, "volatile")
+ final val SYNCHRONIZED = 115; enter(SYNCHRONIZED, "synchronized")
+ final val NATIVE = 116; enter(NATIVE, "native")
+ final val STRICTFP = 117; enter(STRICTFP, "strictfp")
+ final val THROWS = 118; enter(THROWS, "throws")
+
+ /** control structures */
+ final val BREAK = 130; enter(BREAK, "break")
+ final val CONTINUE = 131; enter(CONTINUE, "continue")
+ final val GOTO = 132; enter(GOTO, "goto")
+ final val SWITCH = 133; enter(SWITCH, "switch")
+ final val ASSERT = 134; enter(ASSERT, "assert")
+
+ /** special symbols */
+ final val EQEQ = 140
+ final val BANGEQ = 141
+ final val LT = 142
+ final val GT = 143
+ final val LTEQ = 144
+ final val GTEQ = 145
+ final val BANG = 146
+ final val QMARK = 147
+ final val AMP = 148
+ final val BAR = 149
+ final val PLUS = 150
+ final val MINUS = 151
+ final val ASTERISK = 152
+ final val SLASH = 153
+ final val PERCENT = 154
+ final val HAT = 155
+ final val LTLT = 156
+ final val GTGT = 157
+ final val GTGTGT = 158
+ final val AMPAMP = 159
+ final val BARBAR = 160
+ final val PLUSPLUS = 161
+ final val MINUSMINUS = 162
+ final val TILDE = 163
+ final val DOTDOTDOT = 164
+ final val AMPEQ = 165
+ final val BAREQ = 166
+ final val PLUSEQ = 167
+ final val MINUSEQ = 168
+ final val ASTERISKEQ = 169
+ final val SLASHEQ = 170
+ final val PERCENTEQ = 171
+ final val HATEQ = 172
+ final val LTLTEQ = 173
+ final val GTGTEQ = 174
+ final val GTGTGTEQ = 175
+
+ /** primitive types */
+ final val VOID = 180; enter(VOID, "void")
+ final val BOOLEAN = 181; enter(BOOLEAN, "boolean")
+ final val BYTE = 182; enter(BYTE, "byte")
+ final val SHORT = 183; enter(SHORT, "short")
+ final val CHAR = 184; enter(CHAR, "char")
+ final val INT = 185; enter(INT, "int")
+ final val LONG = 186; enter(LONG, "long")
+ final val FLOAT = 187; enter(FLOAT, "float")
+ final val DOUBLE = 188; enter(DOUBLE, "double")
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala
new file mode 100644
index 000000000..ce2c41797
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala
@@ -0,0 +1,257 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package dotty.tools.dotc
+package parsing
+
+import Utility._
+import scala.reflect.internal.Chars.SU
+
+
+
+/** This is not a public trait - it contains common code shared
+ * between the library level XML parser and the compiler's.
+ * All members should be accessed through those.
+ */
+private[dotty] trait MarkupParserCommon {
+ protected def unreachable = scala.sys.error("Cannot be reached.")
+
+ // type HandleType // MarkupHandler, SymbolicXMLBuilder
+ type InputType // Source, CharArrayReader
+ type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ * Precondition (implied by callers): the leading '<' is already consumed.
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt
+
+ (name, mkAttributes(name, pscope))
+ }
+
+ /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
+ *
+ * see [15]
+ * Reads the target name, then everything up to "?>" into a ProcInstr node.
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
+ @param endCh either `'` or `"`
+ The closing quote is consumed before returning; EOF inside the value is a
+ truncation error.
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ /** Read a quoted attribute value (quote char taken from the input) and
+ * apply whitespace/reference normalization per XML 1.0 §3.3.3.
+ */
+ def xAttributeValue(): String = {
+ val str = xAttributeValue(ch_returning_nextch)
+ // well-formedness constraint
+ normalizeAttributeValue(str)
+ }
+
+ /** Drain characters from `it` up to (and consuming) `end`; error if the
+ * iterator is exhausted before `end` is seen.
+ */
+ private def takeUntilChar(it: Iterator[Char], end: Char): String = {
+ val buf = new StringBuilder
+ while (it.hasNext) it.next match {
+ case `end` => return buf.toString
+ case ch => buf append ch
+ }
+ scala.sys.error("Expected '%s'".format(end))
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ * Verifies the closing name matches `startName`; mismatch reports errorNoEnd.
+ */
+ def xEndTag(startName: String): Unit = {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+ * post-condition: name does neither start, nor end in ':'
+ * A trailing ':' is reported and stripped rather than failing outright.
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ /** Resolve a named entity reference in an attribute value; unknown names are
+ * passed through re-wrapped as "&name;". (Note "quote" is accepted in
+ * addition to the standard "quot".)
+ */
+ private def attr_unescape(s: String) = s match {
+ case "lt" => "<"
+ case "gt" => ">"
+ case "amp" => "&"
+ case "apos" => "'"
+ case "quot" => "\""
+ case "quote" => "\""
+ case _ => "&" + s + ";"
+ }
+
+ /** Replaces only character references right now.
+ * see spec 3.3.3
+ * Whitespace characters collapse to single spaces; "&#..." becomes a
+ * character reference via xCharRef; other "&name;" goes through attr_unescape.
+ */
+ private def normalizeAttributeValue(attval: String): String = {
+ val buf = new StringBuilder
+ val it = attval.iterator.buffered
+
+ while (it.hasNext) buf append (it.next match {
+ case ' ' | '\t' | '\n' | '\r' => " "
+ case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' => attr_unescape(takeUntilChar(it, ';'))
+ case c => c
+ })
+
+ buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ * Three overloads over different input shapes (explicit char/next functions,
+ * an iterator, and the parser's own cursor), all delegating to Utility.parseCharRef.
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next
+ Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch)
+
+ /** Create a lookahead reader which does not influence the input */
+ def lookahead(): BufferedIterator[Char]
+
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+ * explored.) One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
+ def ch: Char
+ def nextch(): Unit
+ protected def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
+ def xHandleError(that: Char, msg: String): Unit
+ def reportSyntaxError(str: String): Unit
+ def reportSyntaxError(pos: Int, str: String): Unit
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
+
+ /** Expect and consume exactly `that`; otherwise report via xHandleError. */
+ def xToken(that: Char): Unit = {
+ if (ch == that) nextch
+ else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+ }
+ def xToken(that: Seq[Char]): Unit = { that foreach xToken }
+
+ /** scan [S] '=' [S]*/
+ def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
+
+ /** skip optional space S? */
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ def xSpace() =
+ if (isSpace(ch)) { nextch; xSpaceOpt }
+ else xHandleError(ch, "whitespace expected")
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x); x }
+
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
+
+ /** Take characters from input stream until given String "until"
+ * is seen. Once seen, the accumulated characters are passed
+ * along with the current Position to the supplied handler function.
+ * The terminator itself is consumed (via peek); EOF first is a truncation error.
+ */
+ protected def xTakeUntil[T](
+ handler: (PositionType, String) => T,
+ positioner: () => PositionType,
+ until: String): T =
+ {
+ val sb = new StringBuilder
+ val head = until.head
+ val rest = until.tail
+
+ while (true) {
+ if (ch == head && peek(rest))
+ return handler(positioner(), sb.toString)
+ else if (ch == SU)
+ truncatedError("") // throws TruncatedXMLControl in compiler
+
+ sb append ch
+ nextch
+ }
+ unreachable
+ }
+
+ /** Create a non-destructive lookahead reader and see if the head
+ * of the input would match the given String. If yes, return true
+ * and drop the entire String from input; if no, return false
+ * and leave input unchanged.
+ * On match, length+1 chars are advanced: the lookahead plus the original char.
+ */
+ private def peek(lookingFor: String): Boolean =
+ (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+ // drop the chars from the real reader (all lookahead + orig)
+ (0 to lookingFor.length) foreach (_ => nextch)
+ true
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala
new file mode 100644
index 000000000..f648b9e2c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala
@@ -0,0 +1,466 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable
+import mutable.{ Buffer, ArrayBuffer, ListBuffer }
+import scala.util.control.ControlThrowable
+import scala.reflect.internal.Chars.SU
+import Parsers._
+import util.Positions._
+import core._
+import Constants._
+import Utility._
+
+
+// XXX/Note: many/most of the functions in here are almost direct cut and pastes
+// from another file - scala.xml.parsing.MarkupParser, it looks like.
+// (It was like that when I got here.) They used to be commented "[Duplicate]" but
+// since approximately all of them were, I snipped it as noise. As far as I can
+// tell this wasn't for any particularly good reason, but slightly different
+// compiler and library parser interfaces meant it would take some setup.
+//
+// I rewrote most of these, but not as yet the library versions: so if you are
+// tempted to touch any of these, please be aware of that situation and try not
+// to let it get any worse. -- paulp
+
+/** Parsers for XML literals and XML patterns embedded in Scala source.
+ *
+ *  @author Burak Emir
+ *  @version 1.0
+ */
+object MarkupParsers {
+
+ import ast.untpd._
+
+  /** Thrown when an element's end tag is missing; caught in xLiteralCommon. */
+  case object MissingEndTagControl extends ControlThrowable {
+    override def getMessage = "start tag was here: "
+  }
+
+  /** Thrown on a stray '}' in XML content; caught in xLiteralCommon. */
+  case object ConfusedAboutBracesControl extends ControlThrowable {
+    override def getMessage = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <"
+  }
+
+  /** Thrown when input ends mid-XML; caught in xLiteralCommon. */
+  case object TruncatedXMLControl extends ControlThrowable {
+    override def getMessage = "input ended while parsing XML"
+  }
+
+ class MarkupParser(parser: Parser, final val preserveWS: Boolean) extends MarkupParserCommon {
+
+    import Tokens.{ LBRACE, RBRACE }
+
+    // Concrete bindings for MarkupParserCommon's abstract types.
+    type PositionType = Position
+    type InputType = CharArrayReader
+    type ElementType = Tree
+    type AttributesType = mutable.Map[String, Tree]
+    type NamespaceType = Any  // namespaces ignored
+
+    def mkAttributes(name: String, other: NamespaceType): AttributesType = xAttributes
+
+    val eof = false
+
+    def truncatedError(msg: String): Nothing = throw TruncatedXMLControl
+    def xHandleError(that: Char, msg: String) =
+      if (ch == SU) throw TruncatedXMLControl
+      else reportSyntaxError(msg)
+
+    // The active reader; swapped out temporarily by charComingAfter for lookahead.
+    var input : CharArrayReader = _
+    def lookahead(): BufferedIterator[Char] =
+      (input.buf drop input.charOffset).iterator.buffered
+
+    import parser.{ symbXMLBuilder => handle }
+
+    def curOffset : Int = input.charOffset - 1
+    var tmppos : Position = NoPosition
+    def ch = input.ch
+    /** this method assign the next character to ch and advances in input */
+    def nextch(): Unit = { input.nextChar() }
+
+    /** Return the current char, then advance (post-increment semantics). */
+    protected def ch_returning_nextch: Char = {
+      val result = ch; input.nextChar(); result
+    }
+
+    def mkProcInstr(position: Position, name: String, text: String): ElementType =
+      parser.symbXMLBuilder.procInstr(position, name, text)
+
+    // True while the parser is positioned inside a `{` scala-block `}` escape.
+    var xEmbeddedBlock = false
+
+    // Stack of (offset, tag-name) for open elements; used only for error messages.
+    private var debugLastStartElement = new mutable.Stack[(Int, String)]
+    private def debugLastPos = debugLastStartElement.top._1
+    private def debugLastElem = debugLastStartElement.top._2
+
+    private def errorBraces() = {
+      reportSyntaxError("in XML content, please use '}}' to express '}'")
+      throw ConfusedAboutBracesControl
+    }
+    def errorNoEnd(tag: String) = {
+      reportSyntaxError("expected closing tag of " + tag)
+      throw MissingEndTagControl
+    }
+
+    /** checks whether next character starts a Scala block, if yes, skip it.
+     * @return true if next character starts a scala block
+     */
+    def xCheckEmbeddedBlock: Boolean = {
+      // attentions, side-effect, used in xText
+      xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+      xEmbeddedBlock
+    }
+
+    /** parse attribute and add it to listmap
+     *  [41] Attributes    ::= { S Name Eq AttValue }
+     *       AttValue      ::= `'` { _ } `'`
+     *                       | `"` { _ } `"`
+     *                       | `{` scalablock `}`
+     *
+     *  Returns an insertion-ordered map from attribute name to value tree.
+     *  Values are either parsed literal attribute text or an embedded
+     *  `{ scala-expr }`.  Reports (but recovers from) duplicate names and
+     *  malformed values.
+     */
+    def xAttributes = {
+      val aMap = mutable.LinkedHashMap[String, Tree]()
+
+      while (isNameStart(ch)) {
+        val start = curOffset
+        val key = xName
+        xEQ
+        // (fix: removed unused local `val delim = ch` — the delimiter is passed
+        // to xAttributeValue via ch_returning_nextch below)
+        val mid = curOffset
+        val value: Tree = ch match {
+          case '"' | '\'' =>
+            val tmp = xAttributeValue(ch_returning_nextch)
+
+            try handle.parseAttribute(Position(start, curOffset, mid), tmp)
+            catch {
+              case e: RuntimeException =>
+                errorAndResult("error parsing attribute value", parser.errorTermTree)
+            }
+
+          case '{' =>
+            nextch
+            xEmbeddedExpr
+          case SU =>
+            throw TruncatedXMLControl
+          case _ =>
+            errorAndResult("' or \" delimited attribute value or '{' scala-expr '}' expected", Literal(Constant("<syntax-error>")))
+        }
+        // well-formedness constraint: unique attribute names
+        if (aMap contains key)
+          reportSyntaxError("attribute %s may only be defined once" format key)
+
+        aMap(key) = value
+        if (ch != '/' && ch != '>')
+          xSpace
+      }
+      aMap
+    }
+
+    /** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
+     *
+     * see [15]
+     */
+    def xCharData: Tree = {
+      val start = curOffset
+      xToken("[CDATA[")
+      val mid = curOffset
+      xTakeUntil(handle.charData, () => Position(start, curOffset, mid), "]]>")
+    }
+
+    /** Consume everything up to the closing xml:unparsed tag as raw text. */
+    def xUnparsed: Tree = {
+      val start = curOffset
+      xTakeUntil(handle.unparsed, () => Position(start, curOffset, start), "</xml:unparsed>")
+    }
+
+    /** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
+     *
+     * see [15]
+     */
+    def xComment: Tree = {
+      val start = curOffset - 2   // Rewinding to include "<!"
+      xToken("--")
+      xTakeUntil(handle.comment, () => Position(start, curOffset, start), "-->")
+    }
+
+    /** Append `txt` to `ts` as a text node.  When whitespace is not preserved,
+     *  runs of whitespace are collapsed to single spaces and the result is
+     *  trimmed; an all-whitespace text node is dropped entirely.
+     */
+    def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
+      def append(t: String) = ts append handle.text(pos, t)
+
+      if (preserveWS) append(txt)
+      else {
+        val sb = new StringBuilder()
+
+        txt foreach { c =>
+          if (!isSpace(c)) sb append c
+          else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
+        }
+
+        val trimmed = sb.toString.trim
+        if (!trimmed.isEmpty) append(trimmed)
+      }
+    }
+
+    /** adds entity/character to ts as side-effect
+     *  @precond ch == '&'
+     */
+    def content_AMP(ts: ArrayBuffer[Tree]): Unit = {
+      nextch
+      val toAppend = ch match {
+        case '#' => // CharacterRef, e.g. &#38;
+          nextch
+          val theChar = handle.text(tmppos, xCharRef)
+          xToken(';')
+          theChar
+        case _ =>   // EntityRef, e.g. &amp;
+          val n = xName
+          xToken(';')
+          handle.entityRef(tmppos, n)
+      }
+
+      ts append toAppend
+    }
+
+    /** Append either an embedded Scala block or plain text to `ts`.
+     * @precond ch == '{'
+     * @postcond: xEmbeddedBlock == false!
+     */
+    def content_BRACE(p: Position, ts: ArrayBuffer[Tree]): Unit =
+      if (xCheckEmbeddedBlock) ts append xEmbeddedExpr
+      else appendText(p, ts, xText)
+
+    /** Returns true if it encounters an end tag (without consuming it),
+     *  appends trees to ts as side-effect.
+     *
+     *  Dispatches on the char after '<' (already consumed by the caller).
+     *
+     *  @param ts buffer receiving the parsed child node, if any
+     *  @return   true iff the next construct is an end tag
+     */
+    private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
+      if (ch == '/')
+        return true   // end tag
+
+      val toAppend = ch match {
+        case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
+        case '?' => nextch ; xProcInstr                            // PI
+        case _   => element                                        // child node
+      }
+
+      ts append toAppend
+      false
+    }
+
+    /** Parse element content until an end tag or end of input is reached.
+     *  Returns the accumulated child nodes (text, elements, entity refs,
+     *  embedded Scala blocks).
+     */
+    def content: Buffer[Tree] = {
+      val ts = new ArrayBuffer[Tree]
+      while (true) {
+        if (xEmbeddedBlock)
+          ts append xEmbeddedExpr
+        else {
+          tmppos = Position(curOffset)
+          ch match {
+            // end tag, cdata, comment, pi or child node
+            case '<'  => nextch ; if (content_LT(ts)) return ts
+            // either the character '{' or an embedded scala block }
+            case '{'  => content_BRACE(tmppos, ts)  // }
+            // EntityRef or CharRef
+            case '&'  => content_AMP(ts)
+            case SU   => return ts
+            // text content - here xEmbeddedBlock might be true
+            case _    => appendText(tmppos, ts, xText)
+          }
+        }
+      }
+      unreachable
+    }
+
+    /** '<' element ::= xmlTag1 '>'  { xmlExpr | '{' simpleExpr '}' } ETag
+     *                | xmlTag1 '/' '>'
+     *
+     *  Precondition: the leading '<' has already been consumed.
+     */
+    def element: Tree = {
+      val start = curOffset
+      val (qname, attrMap) = xTag(())
+      if (ch == '/') {    // empty element
+        xToken("/>")
+        handle.element(Position(start, curOffset, start), qname, attrMap, true, new ListBuffer[Tree])
+      }
+      else {              // handle content
+        xToken('>')
+        // xml:unparsed content is swallowed verbatim, no end-tag matching
+        if (qname == "xml:unparsed")
+          return xUnparsed
+
+        debugLastStartElement.push((start, qname))
+        val ts = content
+        xEndTag(qname)
+        debugLastStartElement.pop
+        val pos = Position(start, curOffset, start)
+        qname match {
+          case "xml:group" => handle.group(pos, ts)
+          case _           => handle.element(pos, qname, attrMap, false, ts)
+        }
+      }
+    }
+
+    /** parse character data.
+     *  precondition: xEmbeddedBlock == false (we are not in a scala block)
+     *
+     *  Stops (returning the text accumulated so far) at end of input, at the
+     *  start of an embedded block, or before '<' / '&'.  A lone '}' is an
+     *  error; '}}' is the escape for a literal '}'.
+     */
+    private def xText: String = {
+      assert(!xEmbeddedBlock, "internal error: encountered embedded block")
+      val buf = new StringBuilder
+      def done = buf.toString
+
+      while (ch != SU) {
+        if (ch == '}') {
+          // '}}' unescapes to '}': keep one and skip the other via nextch below
+          if (charComingAfter(nextch) == '}') nextch
+          else errorBraces()
+        }
+
+        buf append ch
+        nextch
+        if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
+          return done
+      }
+      done
+    }
+
+    /** Some try/catch/finally logic used by xLiteral and xLiteralPattern.
+     *  On any of the XML control throwables (or an out-of-bounds read), reports
+     *  the error and falls through to return an error tree; always resumes the
+     *  scanner at XMLSTART.
+     */
+    private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = {
+      try return f()
+      catch {
+        case c @ TruncatedXMLControl =>
+          ifTruncated(c.getMessage)
+        case c @ (MissingEndTagControl | ConfusedAboutBracesControl) =>
+          parser.syntaxError(c.getMessage + debugLastElem + ">", debugLastPos)
+        case _: ArrayIndexOutOfBoundsException =>
+          parser.syntaxError("missing end tag in XML literal for <%s>" format debugLastElem, debugLastPos)
+      }
+      finally parser.in resume Tokens.XMLSTART
+
+      parser.errorTermTree
+    }
+
+    /** Use a lookahead parser to run speculative body, and return the first char afterward.
+     *  Temporarily swaps `input` for a lookahead reader, so the real reader is
+     *  left untouched; restored in the finally.
+     */
+    private def charComingAfter(body: => Unit): Char = {
+      try {
+        input = input.lookaheadReader
+        body
+        ch
+      }
+      finally input = parser.in
+    }
+
+    /** xLiteral = element { element }
+     *  Parses one element, then — if more XML follows — further sibling
+     *  elements, wrapping them in an XML sequence.
+     *  @return Scala representation of this xml literal
+     */
+    def xLiteral: Tree = xLiteralCommon(
+      () => {
+        input = parser.in
+        handle.isPattern = false
+
+        val ts = new ArrayBuffer[Tree]
+        val start = curOffset
+        tmppos = Position(curOffset)    // Iuli: added this line, as it seems content_LT uses tmppos when creating trees
+        content_LT(ts)
+
+        // parse more XML ?
+        if (charComingAfter(xSpaceOpt) == '<') {
+          xSpaceOpt
+          while (ch == '<') {
+            nextch
+            ts append element
+            xSpaceOpt
+          }
+          handle.makeXMLseq(Position(start, curOffset, start), ts)
+        }
+        else {
+          assert(ts.length == 1)
+          ts(0)
+        }
+      },
+      msg => parser.incompleteInputError(msg)
+    )
+
+    /** @see xmlPattern. resynchronizes after successful parse
+     *  Runs with handle.isPattern saved/restored around the parse.
+     *  @return this xml pattern
+     */
+    def xLiteralPattern: Tree = xLiteralCommon(
+      () => {
+        input = parser.in
+        saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
+          handle.isPattern = true
+          val tree = xPattern
+          xSpaceOpt
+          tree
+        }
+      },
+      msg => parser.syntaxError(msg, curOffset)
+    )
+
+    /** Run `op` with the Scala scanner resumed (as if after '{'), restoring the
+     *  scanner's sepRegions afterwards.  Used for `{ expr }` / `{ pattern }`
+     *  escapes inside XML; `kind` names the construct for error messages.
+     */
+    def escapeToScala[A](op: => A, kind: String) = {
+      xEmbeddedBlock = false
+      val res = saving[List[Int], A](parser.in.sepRegions, parser.in.sepRegions = _) {
+        parser.in resume LBRACE
+        op
+      }
+      if (parser.in.token != RBRACE)
+        reportSyntaxError(" expected end of Scala " + kind)
+
+      res
+    }
+
+    def xEmbeddedExpr: Tree = escapeToScala(parser.block(), "block")
+
+    /** xScalaPatterns  ::= patterns
+     */
+    def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(), "pattern")
+
+    def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(str, pos)
+    /** Report a syntax error at the current offset, then advance one char
+     *  so error recovery makes progress.
+     */
+    def reportSyntaxError(str: String): Unit = {
+      reportSyntaxError(curOffset, "in XML literal: " + str)
+      nextch()
+    }
+
+    /** '<' xPattern  ::= Name [S] { xmlPattern | '{' pattern3 '}' } ETag
+     *                  | Name [S] '/' '>'
+     *
+     *  Precondition: the leading '<' has already been consumed.
+     */
+    def xPattern: Tree = {
+      var start = curOffset
+      val qname = xName
+      debugLastStartElement.push((start, qname))
+      xSpaceOpt
+
+      val ts = new ArrayBuffer[Tree]
+
+      val isEmptyTag = ch == '/'
+      if (isEmptyTag) nextch()
+      xToken('>')
+
+      if (!isEmptyTag) {
+        // recurses until it hits a termination condition, then returns
+        def doPattern: Boolean = {
+          val start1 = curOffset
+          if (xEmbeddedBlock) ts ++= xScalaPatterns
+          else ch match {
+            case '<' => // tag
+              nextch
+              if (ch != '/') ts append xPattern   // child
+              else return false                   // terminate
+
+            case '{' => // embedded Scala patterns
+              while (ch == '{') {
+                nextch
+                ts ++= xScalaPatterns
+              }
+              assert(!xEmbeddedBlock, "problem with embedded block")
+
+            case SU =>
+              throw TruncatedXMLControl
+
+            case _ => // text
+              appendText(Position(start1, curOffset, start1), ts, xText)
+              // here xEmbeddedBlock might be true:
+              // if (xEmbeddedBlock) throw new ApplicationError("after:" + text); // assert
+          }
+          true
+        }
+
+        while (doPattern) { }  // call until false
+        xEndTag(qname)
+        debugLastStartElement.pop
+      }
+
+      handle.makeXMLpat(Position(start, curOffset, start), qname, ts)
+    }
+ } /* class MarkupParser */
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
new file mode 100644
index 000000000..fa0576c7a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -0,0 +1,2309 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable.ListBuffer
+import scala.collection.immutable.BitSet
+import util.{ SourceFile, SourcePosition }
+import Tokens._
+import Scanners._
+import MarkupParsers._
+import core._
+import Flags._
+import Contexts._
+import Names._
+import ast.Positioned
+import ast.Trees._
+import Decorators._
+import StdNames._
+import util.Positions._
+import Constants._
+import ScriptParsers._
+import Comments._
+import scala.annotation.{tailrec, switch}
+import util.DotClass
+import rewrite.Rewrites.patch
+
+object Parsers {
+
+ import ast.untpd._
+ import reporting.diagnostic.Message
+ import reporting.diagnostic.messages._
+
+  /** One entry of the operator stack used by infixOps: the left operand,
+   *  the operator name, and the operator's source offset.
+   */
+  case class OpInfo(operand: Tree, operator: Name, offset: Offset)
+
+  /** Per-kind open-parenthesis depth counters, indexed by opening token. */
+  class ParensCounters {
+    private var parCounts = new Array[Int](lastParen - firstParen)
+
+    def count(tok: Token) = parCounts(tok - firstParen)
+    def change(tok: Token, delta: Int) = parCounts(tok - firstParen) += delta
+    def nonePositive: Boolean = parCounts forall (_ <= 0)
+  }
+
+  @sharable object Location extends Enumeration {
+    val InParens, InBlock, InPattern, ElseWhere = Value
+  }
+
+  @sharable object ParamOwner extends Enumeration {
+    val Class, Type, TypeParam, Def = Value
+  }
+
+  /** The parse starting point depends on whether the source file is self-contained:
+   *  if not, the AST will be supplemented.
+   */
+  def parser(source: SourceFile)(implicit ctx: Context) =
+    if (source.isSelfContained) new ScriptParser(source)
+    else new Parser(source)
+
+  /** Functionality shared by Parser and ScriptParser: tree positioning and
+   *  basic syntax-error reporting against `source`.
+   */
+  abstract class ParserCommon(val source: SourceFile)(implicit ctx: Context) extends DotClass {
+
+    val in: ScannerCommon
+
+    /* ------------- POSITIONS ------------------------------------------- */
+
+    /** Positions tree.
+     *  If `t` does not have a position yet, set its position to the given one.
+     */
+    def atPos[T <: Positioned](pos: Position)(t: T): T =
+      if (t.pos.isSourceDerived) t else t.withPos(pos)
+
+    def atPos[T <: Positioned](start: Offset, point: Offset, end: Offset)(t: T): T =
+      atPos(Position(start, end, point))(t)
+
+    /** If the last read offset is strictly greater than `start`, position tree
+     *  to position spanning from `start` to last read offset, with given point.
+     *  If the last offset is less than or equal to start, the tree `t` did not
+     *  consume any source for its construction. In this case, don't position it yet,
+     *  but wait for its position to be determined by `setChildPositions` when the
+     *  parent node is positioned.
+     */
+    def atPos[T <: Positioned](start: Offset, point: Offset)(t: T): T =
+      if (in.lastOffset > start) atPos(start, point, in.lastOffset)(t) else t
+
+    def atPos[T <: Positioned](start: Offset)(t: T): T =
+      atPos(start, start)(t)
+
+    /** Offset of the name itself; skips the opening backquote of a quoted ident. */
+    def nameStart: Offset =
+      if (in.token == BACKQUOTED_IDENT) in.offset + 1 else in.offset
+
+    def sourcePos(off: Int = in.offset): SourcePosition =
+      source atPos Position(off)
+
+
+    /* ------------- ERROR HANDLING ------------------------------------------- */
+    /** The offset where the last syntax error was reported, or if a skip to a
+     *  safepoint occurred afterwards, the offset of the safe point.
+     */
+    protected var lastErrorOffset : Int = -1
+
+    /** Issue an error at given offset if beyond last error offset
+     *  and update lastErrorOffset.
+     */
+    def syntaxError(msg: => Message, offset: Int = in.offset): Unit =
+      if (offset > lastErrorOffset) {
+        syntaxError(msg, Position(offset))
+        lastErrorOffset = in.offset
+      }
+
+    /** Unconditionally issue an error at given position, without
+     *  updating lastErrorOffset.
+     */
+    def syntaxError(msg: => Message, pos: Position): Unit =
+      ctx.error(msg, source atPos pos)
+
+  }
+
+ class Parser(source: SourceFile)(implicit ctx: Context) extends ParserCommon(source) {
+
+    val in: Scanner = new Scanner(source)
+
+    // Depth counters for currently open (, [, { — consulted by skip().
+    val openParens = new ParensCounters
+
+    /** This is the general parse entry point.
+     *  Overridden by ScriptParser
+     */
+    def parse(): Tree = {
+      val t = compilationUnit()
+      accept(EOF)
+      t
+    }
+
+/* -------------- TOKEN CLASSES ------------------------------------------- */
+
+    // Predicates classifying the current token.
+    def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
+    def isIdent(name: Name) = in.token == IDENTIFIER && in.name == name
+    def isSimpleLiteral = simpleLiteralTokens contains in.token
+    def isLiteral = literalTokens contains in.token
+    def isNumericLit = numericLitTokens contains in.token
+    def isModifier = modifierTokens contains in.token
+    def isExprIntro = canStartExpressionTokens contains in.token
+    def isTemplateIntro = templateIntroTokens contains in.token
+    def isDclIntro = dclIntroTokens contains in.token
+    def isStatSeqEnd = in.token == RBRACE || in.token == EOF
+    def mustStartStat = mustStartStatTokens contains in.token
+
+    def isDefIntro(allowedMods: BitSet) =
+      in.token == AT || (allowedMods contains in.token) || (defIntroTokens contains in.token)
+
+    def isStatSep: Boolean =
+      in.token == NEWLINE || in.token == NEWLINES || in.token == SEMI
+
+/* ------------- ERROR HANDLING ------------------------------------------- */
+
+    /** The offset of the last time when a statement on a new line was definitely
+     *  encountered in the current scope or an outer scope.
+     */
+    private var lastStatOffset = -1
+
+    def setLastStatOffset() =
+      if (mustStartStat && in.isAfterLineEnd)
+        lastStatOffset = in.offset
+
+    /** Is offset1 less or equally indented than offset2?
+     *  This is the case if the characters between the preceding end-of-line and offset1
+     *  are a prefix of the characters between the preceding end-of-line and offset2.
+     */
+    def isLeqIndented(offset1: Int, offset2: Int): Boolean = {
+      def recur(idx1: Int, idx2: Int): Boolean =
+        idx1 == offset1 ||
+        idx2 < offset2 && source(idx1) == source(idx2) && recur(idx1 + 1, idx2 + 1)
+      recur(source.startOfLine(offset1), source.startOfLine(offset2))
+    }
+
+    /** Skip on error to next safe point.
+     *  Safe points are:
+     *   - Closing braces, provided they match an opening brace before the error point.
+     *   - Closing parens and brackets, provided they match an opening parent or bracket
+     *     before the error point and there are no intervening other kinds of parens.
+     *   - Semicolons and newlines, provided there are no intervening braces.
+     *   - Definite statement starts on new lines, provided they are not more indented
+     *     than the last known statement start before the error point.
+     */
+    protected def skip(): Unit = {
+      // Tracks parens opened *during* the skip, so we don't stop at a closer
+      // that matches something we skipped over.
+      val skippedParens = new ParensCounters
+      while (true) {
+        (in.token: @switch) match {
+          case EOF =>
+            return
+          case SEMI | NEWLINE | NEWLINES =>
+            if (skippedParens.count(LBRACE) == 0) return
+          case RBRACE =>
+            if (openParens.count(LBRACE) > 0 && skippedParens.count(LBRACE) == 0)
+              return
+            skippedParens.change(LBRACE, -1)
+          case RPAREN =>
+            if (openParens.count(LPAREN) > 0 && skippedParens.nonePositive)
+              return
+            skippedParens.change(LPAREN, -1)
+          case RBRACKET =>
+            if (openParens.count(LBRACKET) > 0 && skippedParens.nonePositive)
+              return
+            skippedParens.change(LBRACKET, -1)
+          case LBRACE =>
+            skippedParens.change(LBRACE, + 1)
+          case LPAREN =>
+            skippedParens.change(LPAREN, + 1)
+          case LBRACKET=>
+            skippedParens.change(LBRACKET, + 1)
+          case _ =>
+            if (mustStartStat &&
+                in.isAfterLineEnd() &&
+                isLeqIndented(in.offset, lastStatOffset max 0))
+              return
+        }
+        in.nextToken()
+      }
+    }
+
+    def warning(msg: => Message, sourcePos: SourcePosition) =
+      ctx.warning(msg, sourcePos)
+
+    def warning(msg: => Message, offset: Int = in.offset) =
+      ctx.warning(msg, source atPos Position(offset))
+
+    def deprecationWarning(msg: => Message, offset: Int = in.offset) =
+      ctx.deprecationWarning(msg, source atPos Position(offset))
+
+    /** Issue an error at current offset that input is incomplete */
+    def incompleteInputError(msg: => Message) =
+      ctx.incompleteInputError(msg, source atPos Position(in.offset))
+
+    /** If at end of file, issue an incompleteInputError.
+     *  Otherwise issue a syntax error and skip to next safe point.
+     */
+    def syntaxErrorOrIncomplete(msg: => Message) =
+      if (in.token == EOF) incompleteInputError(msg)
+      else {
+        syntaxError(msg)
+        skip()
+        lastErrorOffset = in.offset
+      } // DEBUG
+
+    private def expectedMsg(token: Int): String =
+      expectedMessage(showToken(token))
+    private def expectedMessage(what: String): String =
+      s"$what expected but ${showToken(in.token)} found"
+
+    /** Consume one token of the specified type, or
+     *  signal an error if it is not there.
+     *
+     *  @return The offset at the start of the token to accept
+     */
+    def accept(token: Int): Int = {
+      val offset = in.offset
+      if (in.token != token) {
+        syntaxErrorOrIncomplete(expectedMsg(token))
+      }
+      // re-tested: the error path above may have skipped forward to `token`
+      if (in.token == token) in.nextToken()
+      offset
+    }
+
+    /** semi = nl {nl} | `;'
+     *  nl  = `\n' // where allowed
+     */
+    def acceptStatSep(): Unit = in.token match {
+      case NEWLINE | NEWLINES => in.nextToken()
+      case _                  => accept(SEMI)
+    }
+
+    def acceptStatSepUnlessAtEnd(altEnd: Token = EOF) =
+      if (!isStatSeqEnd && in.token != altEnd) acceptStatSep()
+
+    /** Placeholder tree returned where an expression was required but absent. */
+    def errorTermTree = atPos(in.offset) { Literal(Constant(null)) }
+
+    // True while parsing the return type of a function; saved/restored below.
+    private var inFunReturnType = false
+    private def fromWithinReturnType[T](body: => T): T = {
+      val saved = inFunReturnType
+      try {
+        inFunReturnType = true
+        body
+      } finally inFunReturnType = saved
+    }
+
+    /** Under -language:Scala2 emit a migration warning, otherwise a hard error. */
+    def migrationWarningOrError(msg: String, offset: Int = in.offset) =
+      if (in.isScala2Mode)
+        ctx.migrationWarning(msg, source atPos Position(offset))
+      else
+        syntaxError(msg, offset)
+
+/* ---------- TREE CONSTRUCTION ------------------------------------------- */
+
+    /** Convert tree to formal parameter list
+     */
+    def convertToParams(tree: Tree): List[ValDef] = tree match {
+      case Parens(t)  => convertToParam(t) :: Nil
+      case Tuple(ts)  => ts map (convertToParam(_))
+      case t          => convertToParam(t) :: Nil
+    }
+
+    /** Convert tree to formal parameter
+     *  Accepts `x` or `x: T`; anything else is reported via `expected` and
+     *  yields an ERROR-named parameter so parsing can continue.
+     */
+    def convertToParam(tree: Tree, mods: Modifiers = Modifiers(), expected: String = "formal parameter"): ValDef = tree match {
+      case Ident(name) =>
+        makeParameter(name.asTermName, TypeTree(), mods) withPos tree.pos
+      case Typed(Ident(name), tpt) =>
+        makeParameter(name.asTermName, tpt, mods) withPos tree.pos
+      case _ =>
+        syntaxError(s"not a legal $expected", tree.pos)
+        makeParameter(nme.ERROR, tree, mods)
+    }
+
+    /** Convert (qual)ident to type identifier
+     */
+    def convertToTypeId(tree: Tree): Tree = tree match {
+      case id @ Ident(name) =>
+        cpy.Ident(id)(name.toTypeName)
+      case id @ Select(qual, name) =>
+        cpy.Select(id)(qual, name.toTypeName)
+      case _ =>
+        syntaxError(IdentifierExpected(tree.show), tree.pos)
+        tree
+    }
+
+/* --------------- PLACEHOLDERS ------------------------------------------- */
+
+    /** The implicit parameters introduced by `_` in the current expression.
+     *  Parameters appear in reverse order.
+     */
+    var placeholderParams: List[ValDef] = Nil
+
+    /** Run `op` with a fresh placeholder scope; any `_` parameter still pending
+     *  afterwards escaped its expression and is reported as unbound.
+     */
+    def checkNoEscapingPlaceholders[T](op: => T): T = {
+      val savedPlaceholderParams = placeholderParams
+      placeholderParams = Nil
+
+      try op
+      finally {
+        placeholderParams match {
+          case vd :: _ => syntaxError(UnboundPlaceholderParameter(), vd.pos)
+          case _ =>
+        }
+        placeholderParams = savedPlaceholderParams
+      }
+    }
+
+    /** Is `t` (possibly wrapped in types/annotations/parens) the most recent `_` placeholder? */
+    def isWildcard(t: Tree): Boolean = t match {
+      case Ident(name1)     => placeholderParams.nonEmpty && name1 == placeholderParams.head.name
+      case Typed(t1, _)     => isWildcard(t1)
+      case Annotated(t1, _) => isWildcard(t1)
+      case Parens(t1)       => isWildcard(t1)
+      case _                => false
+    }
+
+/* -------------- XML ---------------------------------------------------- */
+
+    /** the markup parser */
+    lazy val xmlp = new MarkupParser(this, true)
+
+    object symbXMLBuilder extends SymbolicXMLBuilder(this, true) // DEBUG choices
+
+    def xmlLiteral() : Tree = xmlp.xLiteral
+    def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
+
+/* -------- COMBINATORS -------------------------------------------------------- */
+
+    /** Parse `body` between `tok` and its matching closer (tok + 1),
+     *  keeping openParens up to date even if `body` throws.
+     */
+    def enclosed[T](tok: Token, body: => T): T = {
+      accept(tok)
+      openParens.change(tok, 1)
+      try body
+      finally {
+        accept(tok + 1)   // relies on closers being numbered opener + 1
+        openParens.change(tok, -1)
+      }
+    }
+
+    def inParens[T](body: => T): T = enclosed(LPAREN, body)
+    def inBraces[T](body: => T): T = enclosed(LBRACE, body)
+    def inBrackets[T](body: => T): T = enclosed(LBRACKET, body)
+
+    /** Like inBraces, but restores lastStatOffset so error recovery outside the
+     *  braces is not confused by statement starts inside them.
+     */
+    def inDefScopeBraces[T](body: => T): T = {
+      val saved = lastStatOffset
+      try inBraces(body)
+      finally lastStatOffset = saved
+    }
+
+    /** part { `separator` part }
+     */
+    def tokenSeparated[T](separator: Int, part: () => T): List[T] = {
+      val ts = new ListBuffer[T] += part()
+      while (in.token == separator) {
+        in.nextToken()
+        ts += part()
+      }
+      ts.toList
+    }
+
+    def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
+
+/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
+
+    // Pending operators/operands above `base` (see infixOps); head is innermost.
+    var opStack: List[OpInfo] = Nil
+
+    def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+      if (isLeftAssoc(op) != leftAssoc)
+        syntaxError(
+          "left- and right-associative operators with same precedence may not be mixed", offset)
+
+    /** Fold the operator stack down to `base`, building InfixOp trees for every
+     *  pending operator that binds at least as tightly as `prec` (strictly more
+     *  tightly for right-associative operators).
+     */
+    def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean): Tree = {
+      if (opStack != base && precedence(opStack.head.operator) == prec)
+        checkAssoc(opStack.head.offset, opStack.head.operator, leftAssoc)
+      def recur(top: Tree): Tree = {
+        if (opStack == base) top
+        else {
+          val opInfo = opStack.head
+          val opPrec = precedence(opInfo.operator)
+          if (prec < opPrec || leftAssoc && prec == opPrec) {
+            opStack = opStack.tail
+            recur {
+              val opPos = Position(opInfo.offset, opInfo.offset + opInfo.operator.length, opInfo.offset)
+              atPos(opPos union opInfo.operand.pos union top.pos) {
+                InfixOp(opInfo.operand, opInfo.operator, top)
+              }
+            }
+          }
+          else top
+        }
+      }
+      recur(top)
+    }
+
+    /** operand { infixop operand} [postfixop],
+     *  respecting rules of associativity and precedence.
+     *  @param notAnOperator  a token that does not count as operator.
+     *  @param maybePostfix   postfix operators are allowed.
+     */
+    def infixOps(
+        first: Tree, canStartOperand: Token => Boolean, operand: () => Tree,
+        isType: Boolean = false,
+        notAnOperator: Name = nme.EMPTY,
+        maybePostfix: Boolean = false): Tree = {
+      val base = opStack
+      var top = first
+      while (isIdent && in.name != notAnOperator) {
+        val op = if (isType) in.name.toTypeName else in.name
+        top = reduceStack(base, top, precedence(op), isLeftAssoc(op))
+        opStack = OpInfo(top, op, in.offset) :: opStack
+        ident()
+        newLineOptWhenFollowing(canStartOperand)
+        // no operand follows: treat the just-pushed operator as postfix
+        if (maybePostfix && !canStartOperand(in.token)) {
+          val topInfo = opStack.head
+          opStack = opStack.tail
+          val od = reduceStack(base, topInfo.operand, 0, true)
+          return atPos(od.pos.start, topInfo.offset) {
+            PostfixOp(od, topInfo.operator)
+          }
+        }
+        top = operand()
+      }
+      reduceStack(base, top, 0, true)
+    }
+
+/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
+
+    /** Accept identifier and return its name as a term name. */
+    def ident(): TermName =
+      if (isIdent) {
+        val name = in.name
+        in.nextToken()
+        name
+      } else {
+        syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER))
+        nme.ERROR
+      }
+
+    /** Accept identifier and return Ident with its name as a term name. */
+    def termIdent(): Ident = atPos(in.offset) {
+      makeIdent(in.token, ident())
+    }
+
+    /** Accept identifier and return Ident with its name as a type name. */
+    def typeIdent(): Ident = atPos(in.offset) {
+      makeIdent(in.token, ident().toTypeName)
+    }
+
+    // Preserves the backquoted-ness of the identifier in the tree.
+    private def makeIdent(tok: Token, name: Name) =
+      if (tok == BACKQUOTED_IDENT) BackquotedIdent(name)
+      else Ident(name)
+
+    def wildcardIdent(): Ident =
+      atPos(accept(USCORE)) { Ident(nme.WILDCARD) }
+
+    def termIdentOrWildcard(): Ident =
+      if (in.token == USCORE) wildcardIdent() else termIdent()
+
+    /** Accept identifier acting as a selector on given tree `t`. */
+    def selector(t: Tree): Tree =
+      atPos(t.pos.start, in.offset) { Select(t, ident()) }
+
+    /** Selectors ::= ident { `.' ident()
+     *
+     *  Accept `.' separated identifiers acting as a selectors on given tree `t`.
+     *  @param finish   An alternative parse in case the next token is not an identifier.
+     *                  If the alternative does not apply, its tree argument is returned unchanged.
+     */
+    def selectors(t: Tree, finish: Tree => Tree): Tree = {
+      val t1 = finish(t)
+      if (t1 ne t) t1 else dotSelectors(selector(t), finish)
+    }
+
+    /** DotSelectors ::= { `.' ident()
+     *
+     *  Accept `.' separated identifiers acting as a selectors on given tree `t`.
+     *  @param finish   An alternative parse in case the token following a `.' is not an identifier.
+     *                  If the alternative does not apply, its tree argument is returned unchanged.
+     */
+    def dotSelectors(t: Tree, finish: Tree => Tree = id) =
+      if (in.token == DOT) { in.nextToken(); selectors(t, finish) }
+      else t
+
+    // identity continuation used as the default `finish`
+    private val id: Tree => Tree = x => x
+
+    /** Path       ::= StableId
+     *              |  [Ident `.'] this
+     *
+     *  @param thisOK   If true, [Ident `.'] this is acceptable as the path.
+     *                  If false, another selection is required after the `this`.
+     *  @param finish   An alternative parse in case the token following a `.' is not an identifier.
+     *                  If the alternative does not apply, its tree argument is returned unchanged.
+     */
+    def path(thisOK: Boolean, finish: Tree => Tree = id): Tree = {
+      val start = in.offset
+      def handleThis(qual: Ident) = {
+        in.nextToken()
+        val t = atPos(start) { This(qual) }
+        if (!thisOK && in.token != DOT) syntaxError("`.' expected")
+        dotSelectors(t, finish)
+      }
+      def handleSuper(qual: Ident) = {
+        in.nextToken()
+        val mix = mixinQualifierOpt()
+        val t = atPos(start) { Super(This(qual), mix) }
+        accept(DOT)
+        dotSelectors(selector(t), finish)
+      }
+      if (in.token == THIS) handleThis(EmptyTypeIdent)
+      else if (in.token == SUPER) handleSuper(EmptyTypeIdent)
+      else {
+        val t = termIdent()
+        if (in.token == DOT) {
+          // `qual` is only built if the ident turns out to prefix this/super
+          def qual = cpy.Ident(t)(t.name.toTypeName)
+          in.nextToken()
+          if (in.token == THIS) handleThis(qual)
+          else if (in.token == SUPER) handleSuper(qual)
+          else selectors(t, finish)
+        }
+        else t
+      }
+    }
+
+    /** MixinQualifier ::= `[' Id `]'
+     */
+    def mixinQualifierOpt(): Ident =
+      if (in.token == LBRACKET) inBrackets(atPos(in.offset) { typeIdent() })
+      else EmptyTypeIdent
+
+    /** StableId ::= Id
+     *            |  Path `.' Id
+     *            |  [id '.'] super [`[' id `]']`.' id
+     */
+    def stableId(): Tree =
+      path(thisOK = false)
+
+    /** QualId ::= Id {`.' Id}
+     */
+    def qualId(): Tree =
+      dotSelectors(termIdent())
+
+    /** SimpleExpr    ::= literal
+     *                  | symbol
+     *                  | null
+     *  @param negOffset The offset of a preceding `-' sign, if any.
+     *                   If the literal is not negated, negOffset = in.offset.
+     *  @param inPattern If true, an interpolated string is parsed in pattern
+     *                   mode, so embedded `${...}` escapes contain patterns.
+     */
+    def literal(negOffset: Int = in.offset, inPattern: Boolean = false): Tree = {
+      def finish(value: Any): Tree = {
+        val t = atPos(negOffset) { Literal(Constant(value)) }
+        in.nextToken()
+        t
+      }
+      val isNegated = negOffset < in.offset
+      atPos(negOffset) {
+        if (in.token == SYMBOLLIT) atPos(in.skipToken()) { SymbolLit(in.strVal) }
+        // Fix: forward `inPattern` — previously interpolatedString() was always
+        // called with its default (false), so `${...}` inside an interpolated
+        // string *pattern* was parsed as an expression instead of a pattern.
+        else if (in.token == INTERPOLATIONID) interpolatedString(inPattern)
+        else finish(in.token match {
+          case CHARLIT                => in.charVal
+          case INTLIT                 => in.intVal(isNegated).toInt
+          case LONGLIT                => in.intVal(isNegated)
+          case FLOATLIT               => in.floatVal(isNegated).toFloat
+          case DOUBLELIT              => in.floatVal(isNegated)
+          case STRINGLIT | STRINGPART => in.strVal
+          case TRUE                   => true
+          case FALSE                  => false
+          case NULL                   => null
+          case _                      =>
+            syntaxErrorOrIncomplete(IllegalLiteral())
+            null
+        })
+      }
+    }
+
+ // Parses an interpolated string: the interpolator name (read from in.name
+ // before advancing), then alternating STRINGPART/expression segments, then an
+ // optional final STRINGLIT segment. Each segment is a Thicket of the string
+ // part and the interpolated tree.
+ private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
+ val segmentBuf = new ListBuffer[Tree]
+ val interpolator = in.name
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ segmentBuf += Thicket(
+ literal(),
+ atPos(in.offset) {
+ if (in.token == IDENTIFIER)
+ termIdent()
+ else if (in.token == THIS) {
+ in.nextToken()
+ This(EmptyTypeIdent)
+ }
+ else if (in.token == LBRACE)
+ // In patterns only a nested pattern is allowed between braces.
+ if (inPattern) Block(Nil, inBraces(pattern()))
+ else expr()
+ else {
+ ctx.error(InterpolatedStringError())
+ EmptyTree
+ }
+ })
+ }
+ if (in.token == STRINGLIT) segmentBuf += literal()
+ InterpolatedString(interpolator, segmentBuf.toList)
+ }
+
+/* ------------- NEW LINES ------------------------------------------------- */
+
+ /** Skip a single NEWLINE token, if one is current. */
+ def newLineOpt(): Unit = in.token match {
+   case NEWLINE => in.nextToken()
+   case _ =>
+ }
+
+ /** Skip a NEWLINE or NEWLINES token, if one is current. */
+ def newLinesOpt(): Unit = in.token match {
+   case NEWLINE | NEWLINES => in.nextToken()
+   case _ =>
+ }
+
+ /** Skip a single NEWLINE, but only if the token after it is `token`. */
+ def newLineOptWhenFollowedBy(token: Int): Unit = {
+   // in.next is only defined while the current token is NEWLINE
+   if (in.token == NEWLINE && in.next.token == token) newLineOpt()
+ }
+
+ /** Skip a single NEWLINE, but only if the token after it satisfies `p`. */
+ def newLineOptWhenFollowing(p: Int => Boolean): Unit = {
+   // in.next is only defined while the current token is NEWLINE
+   if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
+ }
+
+/* ------------- TYPES ------------------------------------------------------ */
+ /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and
+  *  returns a tree for type `Any` instead.
+  */
+ def toplevelTyp(): Tree = {
+   val parsed = typ()
+   findWildcardType(parsed).fold(parsed) { wildcardPos =>
+     syntaxError("unbound wildcard type", wildcardPos)
+     scalaAny
+   }
+ }
+
+ /** Type ::= FunArgTypes `=>' Type
+ * | HkTypeParamClause `->' Type
+ * | InfixType
+ * FunArgTypes ::= InfixType
+ * | `(' [ FunArgType {`,' FunArgType } ] `)'
+ */
+ def typ(): Tree = {
+ val start = in.offset
+ val t =
+ if (in.token == LPAREN) {
+ in.nextToken()
+ // `()' must be followed by `=>': the empty-parameter function type.
+ if (in.token == RPAREN) {
+ in.nextToken()
+ atPos(start, accept(ARROW)) { Function(Nil, typ()) }
+ }
+ else {
+ openParens.change(LPAREN, 1)
+ val ts = commaSeparated(funArgType)
+ openParens.change(LPAREN, -1)
+ accept(RPAREN)
+ // `(T, ...) =>' is a function type; otherwise the parens denote a
+ // tuple/grouped type, in which case by-name args are illegal.
+ if (in.token == ARROW)
+ atPos(start, in.skipToken()) { Function(ts, typ()) }
+ else {
+ for (t <- ts)
+ if (t.isInstanceOf[ByNameTypeTree])
+ syntaxError(ByNameParameterNotSupported())
+ val tuple = atPos(start) { makeTupleOrParens(ts) }
+ infixTypeRest(refinedTypeRest(withTypeRest(simpleTypeRest(tuple))))
+ }
+ }
+ }
+ else if (in.token == LBRACKET) {
+ // `[tparams] -> T': a type lambda (PolyTypeTree).
+ val start = in.offset
+ val tparams = typeParamClause(ParamOwner.TypeParam)
+ if (in.token == ARROW)
+ atPos(start, in.skipToken())(PolyTypeTree(tparams, typ()))
+ else { accept(ARROW); typ() }
+ }
+ else infixType()
+
+ in.token match {
+ case ARROW => atPos(start, in.skipToken()) { Function(List(t), typ()) }
+ case FORSOME => syntaxError("existential types no longer supported; use a wildcard type or dependent type instead"); t
+ case _ => t
+ }
+ }
+
+ /** InfixType ::= RefinedType {id [nl] refinedType}
+ */
+ def infixType(): Tree = infixTypeRest(refinedType())
+
+ // `*' is excluded as an operator so repeated-parameter syntax stays unambiguous.
+ def infixTypeRest(t: Tree): Tree =
+ infixOps(t, canStartTypeTokens, refinedType, isType = true, notAnOperator = nme.raw.STAR)
+
+ /** RefinedType ::= WithType {Annotation | [nl] Refinement}
+ */
+ val refinedType: () => Tree = () => refinedTypeRest(withType())
+
+ // Keep folding `{...}' refinements onto `t` while braces follow.
+ def refinedTypeRest(t: Tree): Tree = {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (in.token == LBRACE) refinedTypeRest(atPos(t.pos.start) { RefinedTypeTree(t, refinement()) })
+ else t
+ }
+
+ /** WithType ::= AnnotType {`with' AnnotType} (deprecated)
+ */
+ def withType(): Tree = withTypeRest(annotType())
+
+ // `with' between types is deprecated but still parsed, as an intersection.
+ def withTypeRest(t: Tree): Tree =
+ if (in.token == WITH) {
+ deprecationWarning(DeprecatedWithOperator())
+ in.nextToken()
+ AndTypeTree(t, withType())
+ }
+ else t
+
+ /** AnnotType ::= SimpleType {Annotation}
+ */
+ def annotType(): Tree = annotTypeRest(simpleType())
+
+ // Wrap `t` in Annotated nodes for every `@annot' that follows.
+ def annotTypeRest(t: Tree): Tree =
+ if (in.token == AT) annotTypeRest(atPos(t.pos.start) { Annotated(t, annot()) })
+ else t
+
+ /** SimpleType ::= SimpleType TypeArgs
+ * | SimpleType `#' Id
+ * | StableId
+ * | Path `.' type
+ * | `(' ArgTypes `)'
+ * | `_' TypeBounds
+ * | Refinement
+ * | Literal
+ */
+ def simpleType(): Tree = simpleTypeRest {
+ if (in.token == LPAREN)
+ atPos(in.offset) { makeTupleOrParens(inParens(argTypes())) }
+ else if (in.token == LBRACE)
+ // Bare refinement `{ ... }' without a parent type.
+ atPos(in.offset) { RefinedTypeTree(EmptyTree, refinement()) }
+ else if (isSimpleLiteral) { SingletonTypeTree(literal()) }
+ else if (in.token == USCORE) {
+ // `_' followed by optional bounds: a wildcard type.
+ val start = in.skipToken()
+ typeBounds().withPos(Position(start, in.lastOffset, start))
+ }
+ else path(thisOK = false, handleSingletonType) match {
+ case r @ SingletonTypeTree(_) => r
+ case r => convertToTypeId(r)
+ }
+ }
+
+ // Continuation used by [[path]]: `p.type' becomes a singleton type.
+ val handleSingletonType: Tree => Tree = t =>
+ if (in.token == TYPE) {
+ in.nextToken()
+ atPos(t.pos.start) { SingletonTypeTree(t) }
+ } else t
+
+ // Trailing `#Id' projections and `[...]' applications on a simple type.
+ private def simpleTypeRest(t: Tree): Tree = in.token match {
+ case HASH => simpleTypeRest(typeProjection(t))
+ case LBRACKET => simpleTypeRest(atPos(t.pos.start) { AppliedTypeTree(t, typeArgs(namedOK = true)) })
+ case _ => t
+ }
+
+ // `T # Id' — a type projection, parsed after the leading type `t`.
+ private def typeProjection(t: Tree): Tree = {
+ accept(HASH)
+ val id = typeIdent()
+ atPos(t.pos.start, id.pos.start) { Select(t, id.name) }
+ }
+
+ /** NamedTypeArg ::= id `=' Type
+ */
+ // Parses one `name = Type' argument; the name is converted to a type name.
+ val namedTypeArg = () => {
+ val name = ident()
+ accept(EQUALS)
+ NamedArg(name.toTypeName, typ())
+ }
+
+ /** ArgTypes ::= Type {`,' Type}
+  *            |  NamedTypeArg {`,' NamedTypeArg}
+  *
+  *  When `namedOK` is set and the first token is an identifier, the clause
+  *  may be a sequence of named type arguments; the decision is made after
+  *  parsing the first argument, depending on whether `=` follows an Ident.
+  */
+ def argTypes(namedOK: Boolean = false) = {
+   // Parse the remaining comma-separated arguments with `arg`, after `first`.
+   def otherArgs(first: Tree, arg: () => Tree): List[Tree] = {
+     val rest =
+       if (in.token == COMMA) {
+         in.nextToken()
+         commaSeparated(arg)
+       }
+       else Nil
+     first :: rest
+   }
+   if (namedOK && in.token == IDENTIFIER)
+     typ() match {
+       case Ident(name) if in.token == EQUALS =>
+         in.nextToken()
+         otherArgs(NamedArg(name, typ()), namedTypeArg)
+       case firstArg =>
+         // A non-Ident first argument cannot start a named clause; any `=`
+         // here is left for the caller to diagnose.
+         otherArgs(firstArg, typ)
+     }
+   else commaSeparated(typ)
+ }
+
+ /** FunArgType ::= Type | `=>' Type
+ */
+ val funArgType = () =>
+ if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(typ()) }
+ else typ()
+
+ /** ParamType ::= [`=>'] ParamValueType
+ */
+ def paramType(): Tree =
+ if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(paramValueType()) }
+ else paramValueType()
+
+ /** ParamValueType ::= Type [`*']
+ */
+ // A trailing `*' marks a repeated parameter (PostfixOp on the type).
+ def paramValueType(): Tree = {
+ val t = toplevelTyp()
+ if (isIdent(nme.raw.STAR)) {
+ in.nextToken()
+ atPos(t.pos.start) { PostfixOp(t, nme.raw.STAR) }
+ } else t
+ }
+
+ /** TypeArgs ::= `[' Type {`,' Type} `]'
+ * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]'
+ */
+ def typeArgs(namedOK: Boolean = false): List[Tree] = inBrackets(argTypes(namedOK))
+
+ /** Refinement ::= `{' RefineStatSeq `}'
+ */
+ def refinement(): List[Tree] = inBraces(refineStatSeq())
+
+ /** TypeBounds ::= [`>:' Type] [`<:' Type]
+ */
+ def typeBounds(): TypeBoundsTree =
+ atPos(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) }
+
+ // A single bound introduced by `tok`, or EmptyTree when absent.
+ private def bound(tok: Int): Tree =
+ if (in.token == tok) { in.nextToken(); toplevelTyp() }
+ else EmptyTree
+
+ /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type}
+ */
+ // Bounds plus any context/view bounds; wrapped in ContextBounds only when
+ // context bounds are actually present.
+ def typeParamBounds(pname: TypeName): Tree = {
+ val t = typeBounds()
+ val cbs = contextBounds(pname)
+ if (cbs.isEmpty) t
+ else atPos((t.pos union cbs.head.pos).start) { ContextBounds(t, cbs) }
+ }
+
+ // Collects `: T' context bounds (desugared to `T[pname]') and deprecated
+ // `<% T' view bounds (desugared to `pname => T'), recursively.
+ def contextBounds(pname: TypeName): List[Tree] = in.token match {
+ case COLON =>
+ atPos(in.skipToken) {
+ AppliedTypeTree(toplevelTyp(), Ident(pname))
+ } :: contextBounds(pname)
+ case VIEWBOUND =>
+ deprecationWarning("view bounds `<%' are deprecated, use a context bound `:' instead")
+ atPos(in.skipToken) {
+ Function(Ident(pname) :: Nil, toplevelTyp())
+ } :: contextBounds(pname)
+ case _ =>
+ Nil
+ }
+
+ /** An optional ascription `: Type`; an empty TypeTree() when no colon follows. */
+ def typedOpt(): Tree =
+   if (in.token != COLON) TypeTree()
+   else { in.nextToken(); toplevelTyp() }
+
+ /** The flavor of type to parse at `location`: full types inside parentheses,
+  *  refined types inside patterns, infix types elsewhere.
+  */
+ def typeDependingOn(location: Location.Value): Tree = location match {
+   case Location.InParens  => typ()
+   case Location.InPattern => refinedType()
+   case _                  => infixType()
+ }
+
+ /** Checks whether `t` is a wildcard type.
+ * If it is, returns the [[Position]] where the wildcard occurs.
+ */
+ // Looks through parens and annotations; a TypeBoundsTree is how a parsed
+ // `_' wildcard is represented (see simpleType).
+ @tailrec
+ private final def findWildcardType(t: Tree): Option[Position] = t match {
+ case TypeBoundsTree(_, _) => Some(t.pos)
+ case Parens(t1) => findWildcardType(t1)
+ case Annotated(t1, _) => findWildcardType(t1)
+ case _ => None
+ }
+
+/* ----------- EXPRESSIONS ------------------------------------------------ */
+
+ /** EqualsExpr ::= `=' Expr
+ */
+ def equalsExpr(): Tree = {
+ accept(EQUALS)
+ expr()
+ }
+
+ // Condition of if/while: either parenthesized (`altToken` — THEN or DO —
+ // is then optional) or a bare expression followed by a mandatory `altToken`.
+ def condExpr(altToken: Token): Tree = {
+ if (in.token == LPAREN) {
+ val t = atPos(in.offset) { Parens(inParens(exprInParens())) }
+ if (in.token == altToken) in.nextToken()
+ t
+ } else {
+ val t = expr()
+ accept(altToken)
+ t
+ }
+ }
+
+ /** Expr ::= FunParams `=>' Expr
+ * | Expr1
+ * FunParams ::= Bindings
+ * | [`implicit'] Id
+ * | `_'
+ * ExprInParens ::= PostfixExpr `:' Type
+ * | Expr
+ * BlockResult ::= (FunParams | [`implicit'] Id `:' InfixType) => Block
+ * | Expr1
+ * Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] else Expr]
+ * | `if' Expr `then' Expr [[semi] else Expr]
+ * | `while' `(' Expr `)' {nl} Expr
+ * | `while' Expr `do' Expr
+ * | `do' Expr [semi] `while' Expr
+ * | `try' Expr Catches [`finally' Expr]
+ * | `try' Expr [`finally' Expr]
+ * | `throw' Expr
+ * | `return' [Expr]
+ * | ForExpr
+ * | [SimpleExpr `.'] Id `=' Expr
+ * | SimpleExpr1 ArgumentExprs `=' Expr
+ * | PostfixExpr [Ascription]
+ * | PostfixExpr `match' `{' CaseClauses `}'
+ * Bindings ::= `(' [Binding {`,' Binding}] `)'
+ * Binding ::= (Id | `_') [`:' Type]
+ * Ascription ::= `:' CompoundType
+ * | `:' Annotation {Annotation}
+ * | `:' `_' `*'
+ */
+ val exprInParens = () => expr(Location.InParens)
+
+ def expr(): Tree = expr(Location.ElseWhere)
+
+ // Saves and restores `placeholderParams` around the sub-expression so that
+ // `_' placeholders bind to the closest enclosing expression.
+ def expr(location: Location.Value): Tree = {
+ val saved = placeholderParams
+ placeholderParams = Nil
+ val t = expr1(location)
+ if (in.token == ARROW) {
+ // `t` turns out to be the parameter section of a closure.
+ placeholderParams = saved
+ closureRest(t.pos.start, location, convertToParams(t))
+ }
+ else if (isWildcard(t)) {
+ // A bare `_': its placeholder propagates outward.
+ placeholderParams = placeholderParams ::: saved
+ t
+ }
+ else
+ try
+ if (placeholderParams.isEmpty) t
+ else new WildcardFunction(placeholderParams.reverse, t)
+ finally placeholderParams = saved
+ }
+
+ // Dispatch on the current token for every Expr1 alternative; the default
+ // case parses a postfix expression followed by its possible continuations.
+ def expr1(location: Location.Value = Location.ElseWhere): Tree = in.token match {
+ case IF =>
+ atPos(in.skipToken()) {
+ val cond = condExpr(THEN)
+ newLinesOpt()
+ val thenp = expr()
+ val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
+ else EmptyTree
+ If(cond, thenp, elsep)
+ }
+ case WHILE =>
+ atPos(in.skipToken()) {
+ val cond = condExpr(DO)
+ newLinesOpt()
+ val body = expr()
+ WhileDo(cond, body)
+ }
+ case DO =>
+ atPos(in.skipToken()) {
+ val body = expr()
+ if (isStatSep) in.nextToken()
+ accept(WHILE)
+ val cond = expr()
+ DoWhile(body, cond)
+ }
+ case TRY =>
+ val tryOffset = in.offset
+ atPos(in.skipToken()) {
+ val body = expr()
+ val (handler, handlerStart) =
+ if (in.token == CATCH) {
+ val pos = in.offset
+ in.nextToken()
+ (expr(), pos)
+ } else (EmptyTree, -1)
+
+ // A catch with an empty block is diagnosed; handlerStart marks the
+ // `catch' keyword's offset for the error position.
+ handler match {
+ case Block(Nil, EmptyTree) =>
+ assert(handlerStart != -1)
+ syntaxError(
+ new EmptyCatchBlock(body),
+ Position(handlerStart, handler.pos.end)
+ )
+ case _ =>
+ }
+
+ val finalizer =
+ if (in.token == FINALLY) { accept(FINALLY); expr() }
+ else {
+ // A try with neither catch nor finally only warrants a warning.
+ if (handler.isEmpty) warning(
+ EmptyCatchAndFinallyBlock(body),
+ source atPos Position(tryOffset, body.pos.end)
+ )
+ EmptyTree
+ }
+ ParsedTry(body, handler, finalizer)
+ }
+ case THROW =>
+ atPos(in.skipToken()) { Throw(expr()) }
+ case RETURN =>
+ atPos(in.skipToken()) { Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) }
+ case FOR =>
+ forExpr()
+ case IMPLICIT =>
+ implicitClosure(in.skipToken(), location)
+ case _ =>
+ expr1Rest(postfixExpr(), location)
+ }
+
+ // Continuations after a postfix expression: assignment (only to an Ident,
+ // Select or Apply), type ascription, or a match expression.
+ def expr1Rest(t: Tree, location: Location.Value) = in.token match {
+ case EQUALS =>
+ t match {
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ atPos(t.pos.start, in.skipToken()) { Assign(t, expr()) }
+ case _ =>
+ t
+ }
+ case COLON =>
+ ascription(t, location)
+ case MATCH =>
+ atPos(t.pos.start, in.skipToken()) {
+ inBraces(Match(t, caseClauses()))
+ }
+ case _ =>
+ t
+ }
+
+ // Parses what follows the `:' of an ascription: `_ *' (vararg splice,
+ // only legal directly before `)'), annotations, or a type.
+ def ascription(t: Tree, location: Location.Value) = atPos(t.pos.start, in.skipToken()) {
+ in.token match {
+ case USCORE =>
+ val uscoreStart = in.skipToken()
+ if (isIdent(nme.raw.STAR)) {
+ in.nextToken()
+ if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), uscoreStart)
+ Typed(t, atPos(uscoreStart) { Ident(tpnme.WILDCARD_STAR) })
+ } else {
+ syntaxErrorOrIncomplete(IncorrectRepeatedParameterSyntax())
+ t
+ }
+ case AT if location != Location.InPattern =>
+ (t /: annotations())(Annotated)
+ case _ =>
+ val tpt = typeDependingOn(location)
+ // An ascribed `_' placeholder: record the type on its synthetic param.
+ if (isWildcard(t) && location != Location.InPattern) {
+ val vd :: rest = placeholderParams
+ placeholderParams =
+ cpy.ValDef(vd)(tpt = tpt).withPos(vd.pos union tpt.pos) :: rest
+ }
+ Typed(t, tpt)
+ }
+ }
+
+ /** Expr ::= implicit Id `=>' Expr
+ * BlockResult ::= implicit Id [`:' InfixType] `=>' Block
+ */
+ // A closure whose single parameter is marked implicit; an ascription on the
+ // parameter is only allowed in block position.
+ def implicitClosure(start: Int, location: Location.Value, implicitMod: Option[Mod] = None): Tree = {
+ var mods = atPos(start) { Modifiers(Implicit) }
+ if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
+ val id = termIdent()
+ val paramExpr =
+ if (location == Location.InBlock && in.token == COLON)
+ atPos(id.pos.start, in.skipToken()) { Typed(id, infixType()) }
+ else
+ id
+ closureRest(start, location, convertToParam(paramExpr, mods) :: Nil)
+ }
+
+ // Parses `=> body' of a closure; in block position the body is a block.
+ def closureRest(start: Int, location: Location.Value, params: List[Tree]): Tree =
+ atPos(start, in.offset) {
+ accept(ARROW)
+ Function(params, if (location == Location.InBlock) block() else expr())
+ }
+
+ /** PostfixExpr ::= InfixExpr [Id [nl]]
+ * InfixExpr ::= PrefixExpr
+ * | InfixExpr Id [nl] InfixExpr
+ */
+ def postfixExpr(): Tree =
+ infixOps(prefixExpr(), canStartExpressionTokens, prefixExpr, maybePostfix = true)
+
+ /** PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
+ */
+ // `-' directly before a numeric literal is folded into the literal itself
+ // rather than parsed as a unary operator.
+ val prefixExpr = () =>
+ if (isIdent && nme.raw.isUnary(in.name)) {
+ val start = in.offset
+ val name = ident()
+ if (name == nme.raw.MINUS && isNumericLit)
+ simpleExprRest(literal(start), canApply = true)
+ else
+ atPos(start) { PrefixOp(name, simpleExpr()) }
+ }
+ else simpleExpr()
+
+ /** SimpleExpr ::= new Template
+ * | BlockExpr
+ * | SimpleExpr1 [`_']
+ * SimpleExpr1 ::= literal
+ * | xmlLiteral
+ * | Path
+ * | `(' [ExprsInParens] `)'
+ * | SimpleExpr `.' Id
+ * | SimpleExpr (TypeArgs | NamedTypeArgs)
+ * | SimpleExpr1 ArgumentExprs
+ */
+ def simpleExpr(): Tree = {
+ // Block expressions and `new' may not be directly applied to arguments.
+ var canApply = true
+ val t = in.token match {
+ case XMLSTART =>
+ xmlLiteral()
+ case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
+ path(thisOK = true)
+ case USCORE =>
+ // An expression placeholder: invent a fresh synthetic parameter and
+ // record it for the enclosing expression (see expr).
+ val start = in.skipToken()
+ val pname = ctx.freshName(nme.USCORE_PARAM_PREFIX).toTermName
+ val param = ValDef(pname, TypeTree(), EmptyTree).withFlags(SyntheticTermParam)
+ .withPos(Position(start))
+ placeholderParams = param :: placeholderParams
+ atPos(start) { Ident(pname) }
+ case LPAREN =>
+ atPos(in.offset) { makeTupleOrParens(inParens(exprsInParensOpt())) }
+ case LBRACE =>
+ canApply = false
+ blockExpr()
+ case NEW =>
+ canApply = false
+ val start = in.skipToken()
+ // `new C' with a single parent and no body is simplified to a plain
+ // constructor call; otherwise keep the full anonymous-class template.
+ val (impl, missingBody) = template(emptyConstructor)
+ impl.parents match {
+ case parent :: Nil if missingBody =>
+ if (parent.isType) ensureApplied(wrapNew(parent)) else parent
+ case _ =>
+ New(impl.withPos(Position(start, in.lastOffset)))
+ }
+ case _ =>
+ if (isLiteral) literal()
+ else {
+ syntaxErrorOrIncomplete(IllegalStartSimpleExpr(tokenString(in.token)))
+ errorTermTree
+ }
+ }
+ simpleExprRest(t, canApply)
+ }
+
+ // Trailing selections, type applications, argument lists and `_' (eta
+ // expansion / postfix wildcard) after a simple expression.
+ def simpleExprRest(t: Tree, canApply: Boolean = true): Tree = {
+ if (canApply) newLineOptWhenFollowedBy(LBRACE)
+ in.token match {
+ case DOT =>
+ in.nextToken()
+ simpleExprRest(selector(t), canApply = true)
+ case LBRACKET =>
+ val tapp = atPos(t.pos.start, in.offset) { TypeApply(t, typeArgs(namedOK = true)) }
+ simpleExprRest(tapp, canApply = true)
+ case LPAREN | LBRACE if canApply =>
+ val app = atPos(t.pos.start, in.offset) { Apply(t, argumentExprs()) }
+ simpleExprRest(app, canApply = true)
+ case USCORE =>
+ atPos(t.pos.start, in.skipToken()) { PostfixOp(t, nme.WILDCARD) }
+ case _ =>
+ t
+ }
+ }
+
+ /** ExprsInParens ::= ExprInParens {`,' ExprInParens}
+ */
+ def exprsInParensOpt(): List[Tree] =
+ if (in.token == RPAREN) Nil else commaSeparated(exprInParens)
+
+ /** ParArgumentExprs ::= `(' [ExprsInParens] `)'
+ * | `(' [ExprsInParens `,'] PostfixExpr `:' `_' `*' ')' \
+ */
+ def parArgumentExprs(): List[Tree] =
+ inParens(if (in.token == RPAREN) Nil else commaSeparated(argumentExpr))
+
+ /** ArgumentExprs ::= ParArgumentExprs
+ * | [nl] BlockExpr
+ */
+ def argumentExprs(): List[Tree] =
+ if (in.token == LBRACE) blockExpr() :: Nil else parArgumentExprs()
+
+ // `name = rhs' in an argument position is re-read as a named argument.
+ val argumentExpr = () => exprInParens() match {
+ case a @ Assign(Ident(id), rhs) => cpy.NamedArg(a)(id, rhs)
+ case e => e
+ }
+
+ /** ArgumentExprss ::= {ArgumentExprs}
+ */
+ def argumentExprss(fn: Tree): Tree = {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (in.token == LPAREN || in.token == LBRACE) argumentExprss(Apply(fn, argumentExprs()))
+ else fn
+ }
+
+ /** ParArgumentExprss ::= {ParArgumentExprs}
+ */
+ def parArgumentExprss(fn: Tree): Tree =
+ if (in.token == LPAREN) parArgumentExprss(Apply(fn, parArgumentExprs()))
+ else fn
+
+ /** BlockExpr ::= `{' (CaseClauses | Block) `}'
+ */
+ // A leading `case' makes the braces a pattern-matching anonymous function.
+ def blockExpr(): Tree = atPos(in.offset) {
+ inDefScopeBraces {
+ if (in.token == CASE) Match(EmptyTree, caseClauses())
+ else block()
+ }
+ }
+
+ /** Block ::= BlockStatSeq
+  *  @note Return tree does not carry source position.
+  */
+ def block(): Tree = {
+   val stats = blockStatSeq()
+   // A trailing non-definition, non-import statement is the block's result.
+   val lastIsExpr =
+     stats.nonEmpty && !stats.last.isDef && !stats.last.isInstanceOf[Import]
+   if (lastIsExpr) Block(stats.init, stats.last) else Block(stats, EmptyTree)
+ }
+
+ /** Guard ::= if PostfixExpr
+ */
+ def guard(): Tree =
+ if (in.token == IF) { in.nextToken(); postfixExpr() }
+ else EmptyTree
+
+ /** Enumerators ::= Generator {semi Enumerator | Guard}
+ */
+ def enumerators(): List[Tree] = generator() :: enumeratorsRest()
+
+ // Further enumerators are introduced either by a statement separator or
+ // directly by an `if' guard.
+ def enumeratorsRest(): List[Tree] =
+ if (isStatSep) { in.nextToken(); enumerator() :: enumeratorsRest() }
+ else if (in.token == IF) guard() :: enumeratorsRest()
+ else Nil
+
+ /** Enumerator ::= Generator
+ * | Guard
+ * | Pattern1 `=' Expr
+ */
+ def enumerator(): Tree =
+ if (in.token == IF) guard()
+ else {
+ val pat = pattern1()
+ if (in.token == EQUALS) atPos(pat.pos.start, in.skipToken()) { GenAlias(pat, expr()) }
+ else generatorRest(pat)
+ }
+
+ /** Generator ::= Pattern `<-' Expr
+ */
+ def generator(): Tree = generatorRest(pattern1())
+
+ def generatorRest(pat: Tree) =
+ atPos(pat.pos.start, accept(LARROW)) { GenFrom(pat, expr()) }
+
+ /** ForExpr ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
+ * {nl} [`yield'] Expr
+ * | `for' Enumerators (`do' Expr | `yield' Expr)
+ */
+ def forExpr(): Tree = atPos(in.skipToken()) {
+ // Tracks whether the enumerators were delimited by parens/braces; only the
+ // undelimited new-style form requires a following `do' or `yield'.
+ var wrappedEnums = true
+ val enums =
+ if (in.token == LBRACE) inBraces(enumerators())
+ else if (in.token == LPAREN) {
+ val lparenOffset = in.skipToken()
+ openParens.change(LPAREN, 1)
+ // Ambiguity: `for (p <- ...)' vs `for ((p1, p2) <- ...)'. Parse
+ // patterns first, then decide from what follows.
+ val pats = patternsOpt()
+ val pat =
+ if (in.token == RPAREN || pats.length > 1) {
+ wrappedEnums = false
+ accept(RPAREN)
+ openParens.change(LPAREN, -1)
+ atPos(lparenOffset) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
+ }
+ else pats.head
+ val res = generatorRest(pat) :: enumeratorsRest()
+ if (wrappedEnums) {
+ accept(RPAREN)
+ openParens.change(LPAREN, -1)
+ }
+ res
+ } else {
+ wrappedEnums = false
+ enumerators()
+ }
+ newLinesOpt()
+ if (in.token == YIELD) { in.nextToken(); ForYield(enums, expr()) }
+ else if (in.token == DO) { in.nextToken(); ForDo(enums, expr()) }
+ else {
+ if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension())
+ ForDo(enums, expr())
+ }
+ }
+
+ /** CaseClauses ::= CaseClause {CaseClause}
+  *
+  *  At least one clause is parsed; parsing continues while `case` follows.
+  */
+ def caseClauses(): List[CaseDef] = {
+   val acc = new ListBuffer[CaseDef]
+   do acc += caseClause() while (in.token == CASE)
+   acc.toList
+ }
+
+ /** CaseClause ::= case Pattern [Guard] `=>' Block
+ */
+ def caseClause(): CaseDef = atPos(in.offset) {
+ accept(CASE)
+ CaseDef(pattern(), guard(), atPos(accept(ARROW)) { block() })
+ }
+
+ /* -------- PATTERNS ------------------------------------------- */
+
+ /** Pattern ::= Pattern1 { `|' Pattern1 }
+ */
+ val pattern = () => {
+ val pat = pattern1()
+ if (isIdent(nme.raw.BAR))
+ atPos(pat.pos.start) { Alternative(pat :: patternAlts()) }
+ else pat
+ }
+
+ def patternAlts(): List[Tree] =
+ if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1() :: patternAlts() }
+ else Nil
+
+ /** Pattern1 ::= PatVar Ascription
+ * | Pattern2
+ */
+ // Only a variable pattern may carry a `:' ascription.
+ def pattern1(): Tree = {
+ val p = pattern2()
+ if (isVarPattern(p) && in.token == COLON) ascription(p, Location.InPattern)
+ else p
+ }
+
+ /** Pattern2 ::= [varid `@'] InfixPattern
+ */
+ val pattern2 = () => infixPattern() match {
+ case p @ Ident(name) if isVarPattern(p) && in.token == AT =>
+ val offset = in.skipToken()
+
+ // compatibility for Scala2 `x @ _*` syntax
+ infixPattern() match {
+ case pt @ Ident(tpnme.WILDCARD_STAR) =>
+ migrationWarningOrError("The syntax `x @ _*' is no longer supported; use `x : _*' instead", p.pos.start)
+ atPos(p.pos.start, offset) { Typed(p, pt) }
+ case p =>
+ atPos(p.pos.start, offset) { Bind(name, p) }
+ }
+ case p @ Ident(tpnme.WILDCARD_STAR) =>
+ // compatibility for Scala2 `_*` syntax
+ migrationWarningOrError("The syntax `_*' is no longer supported; use `x : _*' instead", p.pos.start)
+ atPos(p.pos.start) { Typed(Ident(nme.WILDCARD), p) }
+ case p =>
+ p
+ }
+
+ /** InfixPattern ::= SimplePattern {Id [nl] SimplePattern}
+ */
+ // `|' is excluded as an operator because it separates pattern alternatives.
+ def infixPattern(): Tree =
+ infixOps(simplePattern(), canStartExpressionTokens, simplePattern, notAnOperator = nme.raw.BAR)
+
+ /** SimplePattern ::= PatVar
+ * | Literal
+ * | XmlPattern
+ * | `(' [Patterns] `)'
+ * | SimplePattern1 [TypeArgs] [ArgumentPatterns]
+ * SimplePattern1 ::= Path
+ * | `{' Block `}'
+ * | SimplePattern1 `.' Id
+ * PatVar ::= Id
+ * | `_'
+ */
+ val simplePattern = () => in.token match {
+ case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
+ path(thisOK = true) match {
+ // A lone `-' immediately before a numeric literal is a negative literal.
+ case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(id.pos.start)
+ case t => simplePatternRest(t)
+ }
+ case USCORE =>
+ // (sic: "wildIndent" is a local naming typo for "wildIdent")
+ val wildIndent = wildcardIdent()
+
+ // compatibility for Scala2 `x @ _*` and `_*` syntax
+ // `x: _*' is parsed in `ascription'
+ if (isIdent(nme.raw.STAR)) {
+ in.nextToken()
+ if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), wildIndent.pos)
+ atPos(wildIndent.pos) { Ident(tpnme.WILDCARD_STAR) }
+ } else wildIndent
+ case LPAREN =>
+ atPos(in.offset) { makeTupleOrParens(inParens(patternsOpt())) }
+ case LBRACE =>
+ dotSelectors(blockExpr())
+ case XMLSTART =>
+ xmlLiteralPattern()
+ case _ =>
+ if (isLiteral) literal()
+ else {
+ syntaxErrorOrIncomplete(IllegalStartOfSimplePattern())
+ errorTermTree
+ }
+ }
+
+ // Optional type arguments and one argument-pattern list after a stable id.
+ def simplePatternRest(t: Tree): Tree = {
+ var p = t
+ if (in.token == LBRACKET)
+ p = atPos(t.pos.start, in.offset) { TypeApply(p, typeArgs()) }
+ if (in.token == LPAREN)
+ p = atPos(t.pos.start, in.offset) { Apply(p, argumentPatterns()) }
+ p
+ }
+
+ /** Patterns ::= Pattern [`,' Pattern]
+ */
+ def patterns() = commaSeparated(pattern)
+
+ def patternsOpt(): List[Tree] =
+ if (in.token == RPAREN) Nil else patterns()
+
+
+ /** ArgumentPatterns ::= `(' [Patterns] `)'
+ * | `(' [Patterns `,'] Pattern2 `:' `_' `*' ')
+ */
+ def argumentPatterns(): List[Tree] =
+ inParens(patternsOpt)
+
+/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
+
+ // Maps a modifier token to its Mod node.
+ // NOTE(review): the match is not exhaustive over Int — callers must only
+ // pass tokens from `modifierTokens`, otherwise this throws a MatchError.
+ private def modOfToken(tok: Int): Mod = tok match {
+ case ABSTRACT => Mod.Abstract()
+ case FINAL => Mod.Final()
+ case IMPLICIT => Mod.Implicit(ImplicitCommon)
+ case INLINE => Mod.Inline()
+ case LAZY => Mod.Lazy()
+ case OVERRIDE => Mod.Override()
+ case PRIVATE => Mod.Private()
+ case PROTECTED => Mod.Protected()
+ case SEALED => Mod.Sealed()
+ }
+
+ /** Drop `private' modifier when followed by a qualifier.
+ * Contract `abstract' and `override' to ABSOVERRIDE
+ */
+ private def normalize(mods: Modifiers): Modifiers =
+ if ((mods is Private) && mods.hasPrivateWithin)
+ normalize(mods &~ Private)
+ else if (mods is AbstractAndOverride)
+ normalize(addFlag(mods &~ (Abstract | Override), AbsOverride))
+ else
+ mods
+
+ // Consume the current modifier token, reporting a repeated modifier.
+ private def addModifier(mods: Modifiers): Modifiers = {
+ val tok = in.token
+ val mod = atPos(in.skipToken()) { modOfToken(tok) }
+
+ if (mods is mod.flags) syntaxError(RepeatedModifier(mod.flags.toString))
+ addMod(mods, mod)
+ }
+
+ // Two flag sets are compatible if either is empty or both apply to the
+ // same kind of definition (term vs type).
+ private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = (
+ flags1.isEmpty
+ || flags2.isEmpty
+ || flags1.isTermFlags && flags2.isTermFlags
+ || flags1.isTypeFlags && flags2.isTypeFlags
+ )
+
+ // Add `flag` to `mods`, diagnosing incompatible combinations; on error the
+ // offending modifiers are dropped in favor of the new flag (or kept, for
+ // combinations without a dedicated message).
+ def addFlag(mods: Modifiers, flag: FlagSet): Modifiers = {
+ def incompatible(kind: String) = {
+ syntaxError(s"modifier(s) `${mods.flags}' not allowed for $kind")
+ Modifiers(flag)
+ }
+ if (compatible(mods.flags, flag)) mods | flag
+ else flag match {
+ case Trait => incompatible("trait")
+ case Method => incompatible("method")
+ case Mutable => incompatible("variable")
+ case _ =>
+ syntaxError(s"illegal modifier combination: ${mods.flags} and $flag")
+ mods
+ }
+ }
+
+ /** Always add the syntactic `mod`, but check and conditionally add semantic `mod.flags`
+ */
+ def addMod(mods: Modifiers, mod: Mod): Modifiers =
+ addFlag(mods, mod.flags).withAddedMod(mod)
+
+ /** AccessQualifier ::= "[" (Id | this) "]"
+ */
+ // `private[this]' becomes the Local flag; `private[C]' records C as the
+ // privateWithin boundary. At most one qualifier is allowed.
+ def accessQualifierOpt(mods: Modifiers): Modifiers =
+ if (in.token == LBRACKET) {
+ if ((mods is Local) || mods.hasPrivateWithin)
+ syntaxError("duplicate private/protected qualifier")
+ inBrackets {
+ if (in.token == THIS) { in.nextToken(); mods | Local }
+ else mods.withPrivateWithin(ident().toTypeName)
+ }
+ } else mods
+
+ /** {Annotation} {Modifier}
+ * Modifiers ::= {Modifier}
+ * LocalModifiers ::= {LocalModifier}
+ * AccessModifier ::= (private | protected) [AccessQualifier]
+ * Modifier ::= LocalModifier
+ * | AccessModifier
+ * | override
+ * LocalModifier ::= abstract | final | sealed | implicit | lazy
+ */
+ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = {
+ def loop(mods: Modifiers): Modifiers = {
+ if (allowed contains in.token) {
+ val isAccessMod = accessModifierTokens contains in.token
+ val mods1 = addModifier(mods)
+ loop(if (isAccessMod) accessQualifierOpt(mods1) else mods1)
+ } else if (in.token == NEWLINE && (mods.hasFlags || mods.hasAnnotations)) {
+ // A newline may separate modifiers once at least one has been seen.
+ in.nextToken()
+ loop(mods)
+ } else {
+ mods
+ }
+ }
+ normalize(loop(start))
+ }
+
+ /** Wrap annotation or constructor in New(...).<init> */
+ def wrapNew(tpt: Tree) = Select(New(tpt), nme.CONSTRUCTOR)
+
+ /** Adjust start of annotation or constructor to position of preceding @ or new */
+ def adjustStart(start: Offset)(tree: Tree): Tree = {
+ val tree1 = tree match {
+ case Apply(fn, args) => cpy.Apply(tree)(adjustStart(start)(fn), args)
+ case Select(qual, name) => cpy.Select(tree)(adjustStart(start)(qual), name)
+ case _ => tree
+ }
+ if (start < tree1.pos.start) tree1.withPos(tree1.pos.withStart(start))
+ else tree1
+ }
+
+ /** Annotation ::= `@' SimpleType {ParArgumentExprs}
+ */
+ def annot() =
+ // HACK: `@inline' — the token is mutated so INLINE reads as an identifier.
+ adjustStart(accept(AT)) {
+ if (in.token == INLINE) in.token = BACKQUOTED_IDENT // allow for now
+ ensureApplied(parArgumentExprss(wrapNew(simpleType())))
+ }
+
+ // All `@annot' trees at the current position; `skipNewLines` allows a
+ // newline before each annotation.
+ def annotations(skipNewLines: Boolean = false): List[Tree] = {
+ if (skipNewLines) newLineOptWhenFollowedBy(AT)
+ if (in.token == AT) annot() :: annotations(skipNewLines)
+ else Nil
+ }
+
+ def annotsAsMods(skipNewLines: Boolean = false): Modifiers =
+ Modifiers() withAnnotations annotations(skipNewLines)
+
+ // Annotations followed by modifiers, as appearing before a definition.
+ def defAnnotsMods(allowed: BitSet): Modifiers =
+ modifiers(allowed, annotsAsMods(skipNewLines = true))
+
+ /* -------- PARAMETERS ------------------------------------------- */
+
+ /** ClsTypeParamClause::= `[' ClsTypeParam {`,' ClsTypeParam} `]'
+ * ClsTypeParam ::= {Annotation} [{Modifier} type] [`+' | `-']
+ * Id [HkTypeParamClause] TypeParamBounds
+ *
+ * DefTypeParamClause::= `[' DefTypeParam {`,' DefTypeParam} `]'
+ * DefTypeParam ::= {Annotation} Id [HkTypeParamClause] TypeParamBounds
+ *
+ * TypTypeParamCaluse::= `[' TypTypeParam {`,' TypTypeParam} `]'
+ * TypTypeParam ::= {Annotation} Id [HkTypePamClause] TypeBounds
+ *
+ * HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
+ * HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypePamClause] | _') TypeBounds
+ */
+ def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
+ def typeParam(): TypeDef = {
+ // Class and method owners admit context bounds and named-only params.
+ val isConcreteOwner = ownerKind == ParamOwner.Class || ownerKind == ParamOwner.Def
+ val start = in.offset
+ var mods = annotsAsMods()
+ if (ownerKind == ParamOwner.Class) {
+ mods = modifiers(start = mods)
+ mods =
+ atPos(start, in.offset) {
+ if (in.token == TYPE) {
+ val mod = atPos(in.skipToken()) { Mod.Type() }
+ (mods | Param | ParamAccessor).withAddedMod(mod)
+ } else {
+ if (mods.hasFlags) syntaxError(TypeParamsTypeExpected(mods, ident()))
+ mods | Param | PrivateLocal
+ }
+ }
+ }
+ else mods = atPos(start) (mods | Param)
+ if (ownerKind != ParamOwner.Def) {
+ // Variance annotations are not allowed on method type parameters.
+ if (isIdent(nme.raw.PLUS)) mods |= Covariant
+ else if (isIdent(nme.raw.MINUS)) mods |= Contravariant
+ if (mods is VarianceFlags) in.nextToken()
+ }
+ atPos(start, nameStart) {
+ val name =
+ if (isConcreteOwner || in.token != USCORE) ident().toTypeName
+ else {
+ // `_' as a higher-kinded parameter name gets a fresh synthetic name.
+ in.nextToken()
+ ctx.freshName(nme.USCORE_PARAM_PREFIX).toTypeName
+ }
+ val hkparams = typeParamClauseOpt(ParamOwner.TypeParam)
+ val bounds =
+ if (isConcreteOwner) typeParamBounds(name)
+ else typeBounds()
+ TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods)
+ }
+ }
+ commaSeparated(typeParam)
+ }
+
+ def typeParamClauseOpt(ownerKind: ParamOwner.Value): List[TypeDef] =
+ if (in.token == LBRACKET) typeParamClause(ownerKind) else Nil
+
+ /** ClsParamClauses ::= {ClsParamClause} [[nl] `(' `implicit' ClsParams `)']
+ * ClsParamClause ::= [nl] `(' [ClsParams] ')'
+ * ClsParams ::= ClsParam {`' ClsParam}
+ * ClsParam ::= {Annotation} [{Modifier} (`val' | `var') | `inline'] Param
+ * DefParamClauses ::= {DefParamClause} [[nl] `(' `implicit' DefParams `)']
+ * DefParamClause ::= [nl] `(' [DefParams] ')'
+ * DefParams ::= DefParam {`,' DefParam}
+ * DefParam ::= {Annotation} [`inline'] Param
+ * Param ::= id `:' ParamType [`=' Expr]
+ */
+ // `owner` being a type name means these are class parameters (val/var
+ // accessors allowed); a term name means method parameters. Mutable state:
+ // implicitMod marks the implicit clause (which must be last),
+ // firstClauseOfCaseClass suppresses PrivateLocal on a case class's first
+ // clause, implicitOffset widens the first param's position once.
+ def paramClauses(owner: Name, ofCaseClass: Boolean = false): List[List[ValDef]] = {
+ var implicitMod: Mod = null
+ var firstClauseOfCaseClass = ofCaseClass
+ var implicitOffset = -1 // use once
+ def param(): ValDef = {
+ val start = in.offset
+ var mods = annotsAsMods()
+ if (owner.isTypeName) {
+ mods = modifiers(start = mods) | ParamAccessor
+ mods =
+ atPos(start, in.offset) {
+ if (in.token == VAL) {
+ val mod = atPos(in.skipToken()) { Mod.Val() }
+ mods.withAddedMod(mod)
+ } else if (in.token == VAR) {
+ val mod = atPos(in.skipToken()) { Mod.Var() }
+ addMod(mods, mod)
+ } else {
+ if (!(mods.flags &~ (ParamAccessor | Inline)).isEmpty)
+ syntaxError("`val' or `var' expected")
+ if (firstClauseOfCaseClass) mods else mods | PrivateLocal
+ }
+ }
+ }
+ else {
+ if (in.token == INLINE) mods = addModifier(mods)
+ mods = atPos(start) { mods | Param }
+ }
+ atPos(start, nameStart) {
+ val name = ident()
+ val tpt =
+ if (ctx.settings.YmethodInfer.value && owner.isTermName && in.token != COLON) {
+ TypeTree() // XX-METHOD-INFER
+ } else {
+ accept(COLON)
+ // By-name is illegal for val/var accessors and implicit params.
+ if (in.token == ARROW) {
+ if (owner.isTypeName && !(mods is Local))
+ syntaxError(s"${if (mods is Mutable) "`var'" else "`val'"} parameters may not be call-by-name")
+ else if (implicitMod != null)
+ syntaxError("implicit parameters may not be call-by-name")
+ }
+ paramType()
+ }
+ val default =
+ if (in.token == EQUALS) { in.nextToken(); expr() }
+ else EmptyTree
+ if (implicitOffset >= 0) {
+ mods = mods.withPos(mods.pos.union(Position(implicitOffset, implicitOffset)))
+ implicitOffset = -1
+ }
+ if (implicitMod != null) mods = addMod(mods, implicitMod)
+ ValDef(name, tpt, default).withMods(mods)
+ }
+ }
+ def paramClause(): List[ValDef] = inParens {
+ if (in.token == RPAREN) Nil
+ else {
+ if (in.token == IMPLICIT) {
+ implicitOffset = in.offset
+ implicitMod = atPos(in.skipToken()) { Mod.Implicit(Implicit) }
+ }
+ commaSeparated(param)
+ }
+ }
+ def clauses(): List[List[ValDef]] = {
+ newLineOptWhenFollowedBy(LPAREN)
+ if (in.token == LPAREN)
+ paramClause() :: {
+ firstClauseOfCaseClass = false
+ // The implicit clause, once seen, terminates the clause list.
+ if (implicitMod == null) clauses() else Nil
+ }
+ else Nil
+ }
+ val start = in.offset
+ val result = clauses()
+ // An auxiliary constructor needs a leading non-implicit parameter clause.
+ if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods is Implicit)))) {
+ in.token match {
+ case LBRACKET => syntaxError("no type parameters allowed here")
+ case EOF => incompleteInputError(AuxConstructorNeedsNonImplicitParameter())
+ case _ => syntaxError(AuxConstructorNeedsNonImplicitParameter(), start)
+ }
+ }
+ result
+ }
+
+/* -------- DEFS ------------------------------------------- */
+
+ /** Import ::= import ImportExpr {`,' ImportExpr}
+ */
+ def importClause(): List[Tree] = {
+ val offset = accept(IMPORT)
+ commaSeparated(importExpr) match {
+ case t :: rest =>
+ // The first import should start at the position of the keyword.
+ t.withPos(t.pos.withStart(offset)) :: rest
+ case nil => nil
+ }
+ }
+
+ /** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+ */
+ val importExpr = () => path(thisOK = false, handleImport) match {
+ case imp: Import =>
+ imp
+ case sel @ Select(qual, name) =>
+ // Plain `import a.b`: turn the final selection into a single Ident selector.
+ val selector = atPos(sel.pos.point) { Ident(name) }
+ cpy.Import(sel)(qual, selector :: Nil)
+ case t =>
+ accept(DOT)
+ Import(t, Ident(nme.WILDCARD) :: Nil)
+ }
+
+ // Passed to `path`: completes the import when `.` is followed by `_` or `{...}`.
+ val handleImport = { tree: Tree =>
+ if (in.token == USCORE) Import(tree, importSelector() :: Nil)
+ else if (in.token == LBRACE) Import(tree, inBraces(importSelectors()))
+ else tree
+ }
+
+ /** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
+ */
+ def importSelectors(): List[Tree] =
+ if (in.token == RBRACE) Nil
+ else {
+ val sel = importSelector()
+ sel :: {
+ // A wildcard must be last; parsing stops after it.
+ if (!isWildcardArg(sel) && in.token == COMMA) {
+ in.nextToken()
+ importSelectors()
+ }
+ else Nil
+ }
+ }
+
+ /** ImportSelector ::= Id [`=>' Id | `=>' `_']
+ */
+ def importSelector(): Tree = {
+ val from = termIdentOrWildcard()
+ if (from.name != nme.WILDCARD && in.token == ARROW)
+ atPos(from.pos.start, in.skipToken()) {
+ // Rename `a => b` (or hiding `a => _`) is represented as a Thicket of both idents.
+ Thicket(from, termIdentOrWildcard())
+ }
+ else from
+ }
+
+ /** Position `mods` at `start` and consume the current (keyword) token. */
+ def posMods(start: Int, mods: Modifiers) = {
+ val mods1 = atPos(start)(mods)
+ in.nextToken()
+ mods1
+ }
+
+ /** Def ::= val PatDef
+ * | var VarDef
+ * | def DefDef
+ * | type {nl} TypeDcl
+ * | TmplDef
+ * Dcl ::= val ValDcl
+ * | var ValDcl
+ * | def DefDcl
+ * | type {nl} TypeDcl
+ */
+ def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
+ case VAL =>
+ // NOTE(review): VAL uses mods.withAddedMod while VAR uses addMod — presumably
+ // equivalent for these flag-less Mods; confirm against Modifiers' definitions.
+ val mod = atPos(in.skipToken()) { Mod.Val() }
+ val mods1 = mods.withAddedMod(mod)
+ patDefOrDcl(start, mods1, in.getDocComment(start))
+ case VAR =>
+ val mod = atPos(in.skipToken()) { Mod.Var() }
+ val mod1 = addMod(mods, mod)
+ patDefOrDcl(start, mod1, in.getDocComment(start))
+ case DEF =>
+ defDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
+ case TYPE =>
+ typeDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
+ case _ =>
+ tmplDef(start, mods)
+ }
+
+ /** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
+ * VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
+ * ValDcl ::= Id {`,' Id} `:' Type
+ * VarDcl ::= Id {`,' Id} `:' Type
+ */
+ def patDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
+ val lhs = commaSeparated(pattern2)
+ val tpt = typedOpt()
+ val rhs =
+ if (tpt.isEmpty || in.token == EQUALS) {
+ accept(EQUALS)
+ // `var x: T = _` (default initializer) only for typed, mutable, plain-ident lhs.
+ if (in.token == USCORE && !tpt.isEmpty && (mods is Mutable) &&
+ (lhs.toList forall (_.isInstanceOf[Ident]))) {
+ wildcardIdent()
+ } else {
+ expr()
+ }
+ } else EmptyTree
+ lhs match {
+ case (id @ Ident(name: TermName)) :: Nil => {
+ // A single plain identifier yields an ordinary ValDef; anything else a PatDef.
+ ValDef(name, tpt, rhs).withMods(mods).setComment(docstring)
+ } case _ =>
+ PatDef(mods, lhs, tpt, rhs)
+ }
+ }
+
+ /** DefDef ::= DefSig (`:' Type [`=' Expr] | "=" Expr)
+ * | this ParamClause ParamClauses `=' ConstrExpr
+ * DefDcl ::= DefSig `:' Type
+ * DefSig ::= id [DefTypeParamClause] ParamClauses
+ */
+ def defDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
+ // Migration aid: under -language:Scala2, patch Scala-2 procedure syntax
+ // (`def f {...}` / `def f`) by inserting the missing result type and `=`.
+ def scala2ProcedureSyntax(resultTypeStr: String) = {
+ val toInsert =
+ if (in.token == LBRACE) s"$resultTypeStr ="
+ else ": Unit " // trailing space ensures that `def f()def g()` works.
+ in.testScala2Mode(s"Procedure syntax no longer supported; `$toInsert' should be inserted here") && {
+ patch(source, Position(in.lastOffset), toInsert)
+ true
+ }
+ }
+ if (in.token == THIS) {
+ // Auxiliary constructor: `def this(...)... = ConstrExpr`.
+ in.nextToken()
+ val vparamss = paramClauses(nme.CONSTRUCTOR)
+ val rhs = {
+ if (!(in.token == LBRACE && scala2ProcedureSyntax(""))) accept(EQUALS)
+ atPos(in.offset) { constrExpr() }
+ }
+ makeConstructor(Nil, vparamss, rhs).withMods(mods)
+ } else {
+ val mods1 = addFlag(mods, Method)
+ val name = ident()
+ val tparams = typeParamClauseOpt(ParamOwner.Def)
+ val vparamss = paramClauses(name)
+ var tpt = fromWithinReturnType(typedOpt())
+ val rhs =
+ if (in.token == EQUALS) {
+ in.nextToken()
+ expr // NOTE(review): parameterless method call written without `()`; same as expr()
+ }
+ else if (!tpt.isEmpty)
+ EmptyTree
+ else if (scala2ProcedureSyntax(": Unit")) {
+ // Under Scala-2 mode the result type becomes Unit and a `{...}` body is parsed.
+ tpt = scalaUnit
+ if (in.token == LBRACE) expr()
+ else EmptyTree
+ }
+ else {
+ if (!isExprIntro) syntaxError(MissingReturnType(), in.lastOffset)
+ accept(EQUALS)
+ expr()
+ }
+ DefDef(name, tparams, vparamss, tpt, rhs).withMods(mods1).setComment(docstring)
+ }
+ }
+
+ /** ConstrExpr ::= SelfInvocation
+ * | ConstrBlock
+ */
+ def constrExpr(): Tree =
+ if (in.token == LBRACE) constrBlock()
+ else Block(selfInvocation() :: Nil, Literal(Constant(())))
+
+ /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
+ */
+ def selfInvocation(): Tree =
+ atPos(accept(THIS)) {
+ newLineOptWhenFollowedBy(LBRACE)
+ argumentExprss(Apply(Ident(nme.CONSTRUCTOR), argumentExprs()))
+ }
+
+ /** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
+ */
+ def constrBlock(): Tree =
+ atPos(in.skipToken()) {
+ val stats = selfInvocation() :: {
+ if (isStatSep) { in.nextToken(); blockStatSeq() }
+ else Nil
+ }
+ accept(RBRACE)
+ Block(stats, Literal(Constant(())))
+ }
+
+ /** TypeDef ::= type Id [TypeParamClause] `=' Type
+ * TypeDcl ::= type Id [TypeParamClause] TypeBounds
+ */
+ def typeDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = {
+ newLinesOpt()
+ atPos(start, nameStart) {
+ val name = ident().toTypeName
+ val tparams = typeParamClauseOpt(ParamOwner.Type)
+ in.token match {
+ case EQUALS =>
+ in.nextToken()
+ // Type alias; type parameters are lambda-abstracted over the rhs.
+ TypeDef(name, lambdaAbstract(tparams, typ())).withMods(mods).setComment(docstring)
+ case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF =>
+ // Abstract type declaration with (possibly empty) bounds.
+ TypeDef(name, lambdaAbstract(tparams, typeBounds())).withMods(mods).setComment(docstring)
+ case _ =>
+ syntaxErrorOrIncomplete("`=', `>:', or `<:' expected")
+ EmptyTree
+ }
+ }
+ }
+
+ /** TmplDef ::= ([`case'] `class' | `trait') ClassDef
+ * | [`case'] `object' ObjectDef
+ */
+ def tmplDef(start: Int, mods: Modifiers): Tree = {
+ val docstring = in.getDocComment(start)
+ in.token match {
+ case TRAIT =>
+ classDef(start, posMods(start, addFlag(mods, Trait)), docstring)
+ case CLASS =>
+ classDef(start, posMods(start, mods), docstring)
+ case CASECLASS =>
+ classDef(start, posMods(start, mods | Case), docstring)
+ case OBJECT =>
+ objectDef(start, posMods(start, mods | Module), docstring)
+ case CASEOBJECT =>
+ objectDef(start, posMods(start, mods | Case | Module), docstring)
+ case _ =>
+ syntaxErrorOrIncomplete("expected start of definition")
+ EmptyTree
+ }
+ }
+
+ /** ClassDef ::= Id [ClsTypeParamClause]
+ * [ConstrMods] ClsParamClauses TemplateOpt
+ */
+ def classDef(start: Offset, mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(start, nameStart) {
+ val name = ident().toTypeName
+ // The primary constructor collects type params, constructor mods, and value params.
+ val constr = atPos(in.lastOffset) {
+ val tparams = typeParamClauseOpt(ParamOwner.Class)
+ val cmods = constrModsOpt()
+ val vparamss = paramClauses(name, mods is Case)
+
+ makeConstructor(tparams, vparamss).withMods(cmods)
+ }
+ val templ = templateOpt(constr)
+
+ TypeDef(name, templ).withMods(mods).setComment(docstring)
+ }
+
+ /** ConstrMods ::= AccessModifier
+ * | Annotation {Annotation} (AccessModifier | `this')
+ */
+ def constrModsOpt(): Modifiers = {
+ val mods = modifiers(accessModifierTokens, annotsAsMods())
+ // Constructor annotations must be followed by an access modifier or `this'.
+ if (mods.hasAnnotations && !mods.hasFlags)
+ if (in.token == THIS) in.nextToken()
+ else syntaxError("`private', `protected', or `this' expected")
+ mods
+ }
+
+ /** ObjectDef ::= Id TemplateOpt
+ */
+ def objectDef(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = atPos(start, nameStart) {
+ val name = ident()
+ val template = templateOpt(emptyConstructor)
+
+ ModuleDef(name, template).withMods(mods).setComment(docstring)
+ }
+
+/* -------- TEMPLATES ------------------------------------------- */
+
+ /** ConstrApp ::= SimpleType {ParArgumentExprs}
+ */
+ val constrApp = () => {
+ val t = annotType()
+ if (in.token == LPAREN) parArgumentExprss(wrapNew(t))
+ else t
+ }
+
+ /** Template ::= ConstrApps [TemplateBody] | TemplateBody
+ * ConstrApps ::= ConstrApp {`with' ConstrApp}
+ *
+ * @return a pair consisting of the template, and a boolean which indicates
+ * whether the template misses a body (i.e. no {...} part).
+ */
+ def template(constr: DefDef): (Template, Boolean) = {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (in.token == LBRACE) (templateBodyOpt(constr, Nil), false)
+ else {
+ val parents = tokenSeparated(WITH, constrApp)
+ newLineOptWhenFollowedBy(LBRACE)
+ val missingBody = in.token != LBRACE
+ (templateBodyOpt(constr, parents), missingBody)
+ }
+ }
+
+ /** TemplateOpt = [`extends' Template | TemplateBody]
+ */
+ def templateOpt(constr: DefDef): Template =
+ if (in.token == EXTENDS) { in.nextToken(); template(constr)._1 }
+ else {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (in.token == LBRACE) template(constr)._1
+ else Template(constr, Nil, EmptyValDef, Nil)
+ }
+
+ /** TemplateBody ::= [nl] `{' TemplateStatSeq `}'
+ */
+ def templateBodyOpt(constr: DefDef, parents: List[Tree]) = {
+ val (self, stats) =
+ if (in.token == LBRACE) templateBody() else (EmptyValDef, Nil)
+ Template(constr, parents, self, stats)
+ }
+
+ /** Parse a template body; a trailing `with` (early definitions) is reported as unsupported. */
+ def templateBody(): (ValDef, List[Tree]) = {
+ val r = inDefScopeBraces { templateStatSeq() }
+ if (in.token == WITH) {
+ syntaxError(EarlyDefinitionsNotSupported())
+ in.nextToken()
+ template(emptyConstructor)
+ }
+ r
+ }
+
+/* -------- STATSEQS ------------------------------------------- */
+
+ /** Create a tree representing a packaging */
+ def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+ // NOTE(review): intentionally partial — callers pass qualId() results, which are
+ // RefTrees; any other tree would raise a MatchError here.
+ case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
+ }
+
+ /** Packaging ::= package QualId [nl] `{' TopStatSeq `}'
+ */
+ def packaging(start: Int): Tree = {
+ val pkg = qualId()
+ newLineOptWhenFollowedBy(LBRACE)
+ val stats = inDefScopeBraces(topStatSeq)
+ makePackaging(start, pkg, stats)
+ }
+
+ /** TopStatSeq ::= TopStat {semi TopStat}
+ * TopStat ::= Annotations Modifiers TmplDef
+ * | Packaging
+ * | package object objectDef
+ * | Import
+ * |
+ */
+ def topStatSeq(): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ while (!isStatSeqEnd) {
+ setLastStatOffset()
+ if (in.token == PACKAGE) {
+ val start = in.skipToken()
+ if (in.token == OBJECT)
+ stats += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) })
+ else stats += packaging(start)
+ }
+ else if (in.token == IMPORT)
+ stats ++= importClause()
+ else if (in.token == AT || isTemplateIntro || isModifier)
+ stats += tmplDef(in.offset, defAnnotsMods(modifierTokens))
+ else if (!isStatSep) {
+ if (in.token == CASE)
+ syntaxErrorOrIncomplete("only `case class` or `case object` allowed")
+ else
+ syntaxErrorOrIncomplete("expected class or object definition")
+ if (mustStartStat) // do parse all definitions even if they are probably local (i.e. a "}" has been forgotten)
+ defOrDcl(in.offset, defAnnotsMods(modifierTokens))
+ }
+ acceptStatSepUnlessAtEnd()
+ }
+ stats.toList
+ }
+
+ /** TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
+ * TemplateStat ::= Import
+ * | Annotations Modifiers Def
+ * | Annotations Modifiers Dcl
+ * | Expr1
+ * | super ArgumentExprs {ArgumentExprs}
+ * |
+ */
+ def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
+ var self: ValDef = EmptyValDef
+ val stats = new ListBuffer[Tree]
+ // A first expression followed by `=>' is reinterpreted as a self-type clause.
+ if (isExprIntro) {
+ val first = expr1()
+ if (in.token == ARROW) {
+ first match {
+ case Typed(tree @ This(EmptyTypeIdent), tpt) =>
+ self = makeSelfDef(nme.WILDCARD, tpt).withPos(first.pos)
+ case _ =>
+ val ValDef(name, tpt, _) = convertToParam(first, expected = "self type clause")
+ if (name != nme.ERROR)
+ self = makeSelfDef(name, tpt).withPos(first.pos)
+ }
+ in.nextToken()
+ } else {
+ stats += first
+ acceptStatSepUnlessAtEnd()
+ }
+ }
+ var exitOnError = false
+ while (!isStatSeqEnd && !exitOnError) {
+ setLastStatOffset()
+ if (in.token == IMPORT)
+ stats ++= importClause()
+ else if (isExprIntro)
+ stats += expr1()
+ else if (isDefIntro(modifierTokens))
+ stats += defOrDcl(in.offset, defAnnotsMods(modifierTokens))
+ else if (!isStatSep) {
+ exitOnError = mustStartStat
+ syntaxErrorOrIncomplete("illegal start of definition")
+ }
+ acceptStatSepUnlessAtEnd()
+ }
+ (self, if (stats.isEmpty) List(EmptyTree) else stats.toList)
+ }
+
+ /** RefineStatSeq ::= RefineStat {semi RefineStat}
+ * RefineStat ::= Dcl
+ * |
+ * (in reality we admit Defs and filter them out afterwards)
+ */
+ def refineStatSeq(): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ while (!isStatSeqEnd) {
+ if (isDclIntro) {
+ stats += defOrDcl(in.offset, Modifiers())
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete(
+ "illegal start of declaration" +
+ (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
+ else ""))
+ }
+ acceptStatSepUnlessAtEnd()
+ }
+ stats.toList
+ }
+
+ /** Parse a local definition, adding `implicitFlag`/`implicitMod` to its modifiers. */
+ def localDef(start: Int, implicitFlag: FlagSet, implicitMod: Option[Mod] = None): Tree = {
+ var mods = addFlag(defAnnotsMods(localModifierTokens), implicitFlag)
+ if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
+ defOrDcl(start, mods)
+ }
+
+ /** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
+ * BlockStat ::= Import
+ * | Annotations [implicit] [lazy] Def
+ * | Annotations LocalModifiers TmplDef
+ * | Expr1
+ * |
+ */
+ def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
+ val stats = new ListBuffer[Tree]
+ var exitOnError = false
+ while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
+ setLastStatOffset()
+ if (in.token == IMPORT)
+ stats ++= importClause()
+ else if (isExprIntro)
+ stats += expr(Location.InBlock)
+ else if (isDefIntro(localModifierTokens))
+ if (in.token == IMPLICIT) {
+ // `implicit` may open either an implicit closure or an implicit local definition.
+ val start = in.offset
+ val mod = atPos(in.skipToken()) { Mod.Implicit(ImplicitCommon) }
+ if (isIdent) stats += implicitClosure(start, Location.InBlock, Some(mod))
+ else stats += localDef(start, ImplicitCommon, Some(mod))
+ } else {
+ stats += localDef(in.offset, EmptyFlags)
+ }
+ else if (!isStatSep && (in.token != CASE)) {
+ exitOnError = mustStartStat
+ val addendum = if (isModifier) " (no modifiers allowed here)" else ""
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum)
+ }
+ acceptStatSepUnlessAtEnd(CASE)
+ }
+ stats.toList
+ }
+
+ /** CompilationUnit ::= {package QualId semi} TopStatSeq
+ */
+ def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+ def topstats(): List[Tree] = {
+ val ts = new ListBuffer[Tree]
+ while (in.token == SEMI) in.nextToken()
+ val start = in.offset
+ if (in.token == PACKAGE) {
+ in.nextToken()
+ if (in.token == OBJECT) {
+ val docstring = in.getDocComment(start)
+ ts += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) }, docstring)
+ if (in.token != EOF) {
+ acceptStatSep()
+ ts ++= topStatSeq()
+ }
+ } else {
+ val pkg = qualId()
+ newLineOptWhenFollowedBy(LBRACE)
+ if (in.token == EOF)
+ ts += makePackaging(start, pkg, List())
+ else if (in.token == LBRACE) {
+ // Braced packaging; further top-level statements may follow the closing brace.
+ ts += inDefScopeBraces(makePackaging(start, pkg, topStatSeq()))
+ acceptStatSepUnlessAtEnd()
+ ts ++= topStatSeq()
+ }
+ else {
+ // Header-style `package a.b` — the rest of the file nests inside it.
+ acceptStatSep()
+ ts += makePackaging(start, pkg, topstats())
+ }
+ }
+ }
+ else
+ ts ++= topStatSeq()
+
+ ts.toList
+ }
+
+ topstats() match {
+ case List(stat @ PackageDef(_, _)) => stat
+ case Nil => EmptyTree // without this case we'd get package defs without positions
+ case stats => PackageDef(Ident(nme.EMPTY_PACKAGE), stats)
+ }
+ }
+ }
+
+
+ /** A parser variant that skips over block and template bodies instead of
+ * parsing them, yielding only the outline of the source.
+ */
+ class OutlineParser(source: SourceFile)(implicit ctx: Context) extends Parser(source) {
+
+ /** Consume a balanced `{...}` region without building trees and return `body`. */
+ def skipBraces[T](body: T): T = {
+ accept(LBRACE)
+ var openBraces = 1
+ while (in.token != EOF && openBraces > 0) {
+ // XML literals may contain braces, so they are scanned as a unit.
+ if (in.token == XMLSTART) xmlLiteral()
+ else {
+ if (in.token == LBRACE) openBraces += 1
+ else if (in.token == RBRACE) openBraces -= 1
+ in.nextToken()
+ }
+ }
+ body
+ }
+
+ override def blockExpr(): Tree = skipBraces(EmptyTree)
+
+ override def templateBody() = skipBraces((EmptyValDef, List(EmptyTree)))
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
new file mode 100644
index 000000000..60003d098
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -0,0 +1,1014 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core.Names._, core.Contexts._, core.Decorators._, util.Positions._
+import core.StdNames._, core.Comments._
+import util.SourceFile
+import java.lang.Character.isDigit
+import scala.reflect.internal.Chars._
+import Tokens._
+import scala.annotation.{ switch, tailrec }
+import scala.collection.mutable
+import mutable.ListBuffer
+import Utility.isNameStart
+import rewrite.Rewrites.patch
+
+object Scanners {
+
+ /** Offset into source character array */
+ type Offset = Int
+
+ /** An undefined offset */
+ val NoOffset: Offset = -1
+
+ type Token = Int
+
+ /** Mutable record describing one token; the scanner keeps several instances
+ * for lookahead (`next`) and history (`prev`).
+ */
+ trait TokenData {
+
+ /** the next token */
+ var token: Token = EMPTY
+
+ /** the offset of the first character of the current token */
+ var offset: Offset = 0
+
+ /** the offset of the character following the token preceding this one */
+ var lastOffset: Offset = 0
+
+ /** the name of an identifier */
+ var name: TermName = null
+
+ /** the string value of a literal */
+ var strVal: String = null
+
+ /** the base of a number */
+ var base: Int = 0
+
+ /** Copy all token fields from `td` into this instance. */
+ def copyFrom(td: TokenData) = {
+ this.token = td.token
+ this.offset = td.offset
+ this.lastOffset = td.lastOffset
+ this.name = td.name
+ this.strVal = td.strVal
+ this.base = td.base
+ }
+ }
+
+ /** Scanner functionality shared by the main scanner and auxiliary scanners:
+ * error reporting, the literal buffer, and literal-value conversion.
+ */
+ abstract class ScannerCommon(source: SourceFile)(implicit ctx: Context) extends CharArrayReader with TokenData {
+ val buf = source.content
+
+ // Errors -----------------------------------------------------------------
+
+ /** the last error offset
+ */
+ var errOffset: Offset = NoOffset
+
+
+ /** Generate an error at the given offset */
+ def error(msg: String, off: Offset = offset) = {
+ ctx.error(msg, source atPos Position(off))
+ token = ERROR
+ errOffset = off
+ }
+
+ /** signal an error where the input ended in the middle of a token */
+ def incompleteInputError(msg: String): Unit = {
+ ctx.incompleteInputError(msg, source atPos Position(offset))
+ token = EOF
+ errOffset = offset
+ }
+
+ // Setting token data ----------------------------------------------------
+
+ /** A character buffer for literals
+ */
+ val litBuf = new StringBuilder
+
+ /** append Unicode character to "litBuf" buffer
+ */
+ protected def putChar(c: Char): Unit = litBuf.append(c)
+
+ /** Return buffer contents and clear */
+ def flushBuf(buf: StringBuilder): String = {
+ val str = buf.toString
+ buf.clear()
+ str
+ }
+
+ /** Clear buffer and set name and token */
+ def finishNamed(idtoken: Token = IDENTIFIER, target: TokenData = this): Unit = {
+ target.name = flushBuf(litBuf).toTermName
+ target.token = idtoken
+ if (idtoken == IDENTIFIER) {
+ val idx = target.name.start
+ target.token = toToken(idx)
+ }
+ }
+
+ /** Map a name index to its token; implementations return a keyword token or
+ * IDENTIFIER (see `Scanner.toToken`).
+ */
+ def toToken(idx: Int): Token
+
+ /** Clear buffer and set string */
+ def setStrVal() =
+ strVal = flushBuf(litBuf)
+
+ /** Convert current strVal to char value
+ */
+ def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0
+
+ /** Convert current strVal, base to long value
+ * This is tricky because of max negative value.
+ */
+ def intVal(negated: Boolean): Long = {
+ if (token == CHARLIT && !negated) {
+ charVal
+ } else {
+ var value: Long = 0
+ val divider = if (base == 10) 1 else 2
+ val limit: Long =
+ if (token == LONGLIT) Long.MaxValue else Int.MaxValue
+ var i = 0
+ val len = strVal.length
+ while (i < len) {
+ val d = digit2int(strVal charAt i, base)
+ if (d < 0) {
+ error("malformed integer number")
+ return 0
+ }
+ // Overflow check before accumulating; the negated case admits one more
+ // value (the most negative literal).
+ if (value < 0 ||
+ limit / (base / divider) < value ||
+ limit - (d / divider) < value * (base / divider) &&
+ !(negated && limit == value * base - 1 + d)) {
+ error("integer number too large")
+ return 0
+ }
+ value = value * base + d
+ i += 1
+ }
+ if (negated) -value else value
+ }
+ }
+
+ def intVal: Long = intVal(false)
+
+ /** Convert current strVal, base to double value
+ */
+ def floatVal(negated: Boolean): Double = {
+ val limit: Double =
+ if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
+ try {
+ val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
+ if (value > limit)
+ error("floating point number too large")
+ if (negated) -value else value
+ } catch {
+ case _: NumberFormatException =>
+ error("malformed floating point number")
+ 0.0
+ }
+ }
+
+ def floatVal: Double = floatVal(false)
+
+ }
+
+ class Scanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) {
+ val keepComments = ctx.settings.YkeepComments.value
+
+ /** All doc comments as encountered, each list contains doc comments from
+ * the same block level. Starting with the deepest level and going upward
+ */
+ private[this] var docsPerBlockStack: List[List[Comment]] = List(Nil)
+
+ /** Adds level of nesting to docstrings */
+ def enterBlock(): Unit =
+ docsPerBlockStack = List(Nil) ::: docsPerBlockStack
+
+ /** Removes level of nesting for docstrings */
+ def exitBlock(): Unit = docsPerBlockStack = docsPerBlockStack match {
+ case x :: Nil => List(Nil)
+ case _ => docsPerBlockStack.tail
+ }
+
+ /** Returns the closest docstring preceding the position supplied, and drops
+ * it (and everything before it) from the current block's list so a doc
+ * comment is attached at most once.
+ */
+ def getDocComment(pos: Int): Option[Comment] = {
+ // Walk the docstrings, keeping the last one that still ends before `pos`.
+ // The `case _` both terminates on Nil and stops when the next candidate
+ // fails the guard; the previous `case Nil => c` alone made the match
+ // non-exhaustive and raised a MatchError on a non-empty list whose head
+ // ended after `pos`.
+ def closest(c: Comment, docstrings: List[Comment]): Comment = docstrings match {
+ case x :: xs if (c.pos.end < x.pos.end && x.pos.end <= pos) => closest(x, xs)
+ case _ => c
+ }
+
+ docsPerBlockStack match {
+ case (list @ (x :: xs)) :: _ => {
+ val c = closest(x, xs)
+ docsPerBlockStack = list.dropWhile(_ != c).tail :: docsPerBlockStack.tail
+ Some(c)
+ }
+ case _ => None
+ }
+ }
+
+ /** A buffer for comments */
+ val commentBuf = new StringBuilder
+
+ // In Scala-2 mode, keywords new to dotty (currently `inline') are demoted to identifiers.
+ private def handleMigration(keyword: Token): Token =
+ if (!isScala2Mode) keyword
+ else if (keyword == INLINE) treatAsIdent()
+ else keyword
+
+
+ // Warn and patch the source with backquotes so the name stays an identifier.
+ private def treatAsIdent() = {
+ testScala2Mode(i"$name is now a keyword, write `$name` instead of $name to keep it as an identifier")
+ patch(source, Position(offset), "`")
+ patch(source, Position(offset + name.length), "`")
+ IDENTIFIER
+ }
+
+ /** Map a name index to a keyword token via `kwArray`, or IDENTIFIER if it is no keyword. */
+ def toToken(idx: Int): Token =
+ if (idx >= 0 && idx <= lastKeywordStart) handleMigration(kwArray(idx))
+ else IDENTIFIER
+
+ private class TokenData0 extends TokenData
+
+ /** we need one token lookahead and one token history
+ */
+ val next : TokenData = new TokenData0
+ private val prev : TokenData = new TokenData0
+
+ /** a stack of tokens which indicates whether line-ends can be statement separators
+ * also used for keeping track of nesting levels.
+ * We keep track of the closing symbol of a region. This can be
+ * RPAREN if region starts with '('
+ * RBRACKET if region starts with '['
+ * RBRACE if region starts with '{'
+ * ARROW if region starts with `case'
+ * STRINGLIT if region is a string interpolation expression starting with '${'
+ * (the STRINGLIT appears twice in succession on the stack iff the
+ * expression is a multiline string literal).
+ */
+ var sepRegions: List[Token] = List()
+
+// Scala 2 compatibility
+
+ val isScala2Mode = ctx.settings.language.value.contains(nme.Scala2.toString)
+
+ /** Cannot use ctx.featureEnabled because accessing the context would force too much */
+ def testScala2Mode(msg: String, pos: Position = Position(offset)) = {
+ if (isScala2Mode) ctx.migrationWarning(msg, source atPos pos)
+ isScala2Mode
+ }
+
+// Get next token ------------------------------------------------------------
+
+ /** Are we directly in a string interpolation expression?
+ */
+ private def inStringInterpolation =
+ sepRegions.nonEmpty && sepRegions.head == STRINGLIT
+
+ /** Are we directly in a multiline string interpolation expression?
+ * @pre inStringInterpolation
+ */
+ private def inMultiLineInterpolation =
+ inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
+
+ /** read next token and return last offset
+ */
+ def skipToken(): Offset = {
+ val off = offset
+ nextToken()
+ off
+ }
+
+ /** Adjust `sepRegions` for the token just consumed: opening tokens push
+ * their closing counterpart, closing tokens pop back to their opener.
+ */
+ def adjustSepRegions(lastToken: Token): Unit = (lastToken: @switch) match {
+ case LPAREN =>
+ sepRegions = RPAREN :: sepRegions
+ case LBRACKET =>
+ sepRegions = RBRACKET :: sepRegions
+ case LBRACE =>
+ sepRegions = RBRACE :: sepRegions
+ case CASE =>
+ sepRegions = ARROW :: sepRegions
+ case RBRACE =>
+ // A closing brace also discards any unclosed inner regions.
+ while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
+ sepRegions = sepRegions.tail
+ if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
+ case RBRACKET | RPAREN =>
+ if (!sepRegions.isEmpty && sepRegions.head == lastToken)
+ sepRegions = sepRegions.tail
+ case ARROW =>
+ if (!sepRegions.isEmpty && sepRegions.head == lastToken)
+ sepRegions = sepRegions.tail
+ case STRINGLIT =>
+ // Pop one entry for single-line, two for multi-line interpolations.
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
+ sepRegions = sepRegions.tail
+ case _ =>
+ }
+
+ /** Produce next token, filling TokenData fields of Scanner.
+ */
+ def nextToken(): Unit = {
+ val lastToken = token
+ adjustSepRegions(lastToken)
+
+ // Read a token or copy it from `next` tokenData
+ if (next.token == EMPTY) {
+ lastOffset = lastCharOffset
+ if (inStringInterpolation) fetchStringPart()
+ else fetchToken()
+ if (token == ERROR) adjustSepRegions(STRINGLIT)
+ } else {
+ this copyFrom next
+ next.token = EMPTY
+ }
+
+ /** Insert NEWLINE or NEWLINES if
+ * - we are after a newline
+ * - we are within a { ... } or on toplevel (wrt sepRegions)
+ * - the current token can start a statement and the one before can end it
+ * insert NEWLINES if we are past a blank line, NEWLINE otherwise
+ */
+ if (isAfterLineEnd() &&
+ (canEndStatTokens contains lastToken) &&
+ (canStartStatTokens contains token) &&
+ (sepRegions.isEmpty || sepRegions.head == RBRACE)) {
+ // The real token is stashed in `next`; the synthetic newline is emitted first.
+ next copyFrom this
+ // todo: make offset line-end of previous line?
+ offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset
+ token = if (pastBlankLine()) NEWLINES else NEWLINE
+ }
+
+ postProcessToken()
+ // print("[" + this +"]")
+ }
+
+ def postProcessToken() = {
+ // Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE
+ def lookahead() = {
+ prev copyFrom this
+ fetchToken()
+ }
+ def reset(nextLastOffset: Offset) = {
+ lastOffset = nextLastOffset
+ next copyFrom this
+ this copyFrom prev
+ }
+ def fuse(tok: Int) = {
+ token = tok
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
+ if (token == CASE) {
+ val nextLastOffset = lastCharOffset
+ lookahead()
+ if (token == CLASS) fuse(CASECLASS)
+ else if (token == OBJECT) fuse(CASEOBJECT)
+ else reset(nextLastOffset)
+ } else if (token == SEMI) {
+ val nextLastOffset = lastCharOffset
+ lookahead()
+ if (token != ELSE) reset(nextLastOffset)
+ }
+ }
+
+ /** Is current token first one after a newline? */
+ def isAfterLineEnd(): Boolean =
+ lastOffset < lineStartOffset &&
+ (lineStartOffset <= offset ||
+ lastOffset < lastLineStartOffset && lastLineStartOffset <= offset)
+
+ /** Is there a blank line between the current token and the last one?
+ * @pre afterLineEnd().
+ */
+ private def pastBlankLine(): Boolean = {
+ val end = offset
+ // Scan the gap between the tokens for a line consisting only of whitespace.
+ def recur(idx: Offset, isBlank: Boolean): Boolean =
+ idx < end && {
+ val ch = buf(idx)
+ if (ch == LF || ch == FF) isBlank || recur(idx + 1, true)
+ else recur(idx + 1, isBlank && ch <= ' ')
+ }
+ recur(lastOffset, false)
+ }
+
+ /** read next token, filling TokenData fields of Scanner.
+ * Dispatches on the first character of the token; whitespace and comments
+ * are skipped by recursing.
+ */
+ protected final def fetchToken(): Unit = {
+ offset = charOffset - 1
+ (ch: @switch) match {
+ case ' ' | '\t' | CR | LF | FF =>
+ nextChar()
+ fetchToken()
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' | '_' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ // An identifier directly followed by `"` starts an interpolated string.
+ if (ch == '"' && token == IDENTIFIER)
+ token = INTERPOLATIONID
+ case '<' => // is XMLSTART?
+ def fetchLT() = {
+ val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
+ nextChar()
+ last match {
+ case ' ' | '\t' | '\n' | '{' | '(' | '>' if isNameStart(ch) || ch == '!' || ch == '?' =>
+ token = XMLSTART
+ case _ =>
+ // Console.println("found '<', but last is '" + in.last +"'"); // DEBUG
+ putChar('<')
+ getOperatorRest()
+ }
+ }
+ fetchLT
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | /*'<' | */
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '\\' =>
+ putChar(ch)
+ nextChar()
+ getOperatorRest()
+ case '/' =>
+ // Either a comment (skipped) or an operator starting with `/`.
+ if (skipComment()) {
+ fetchToken()
+ } else {
+ putChar('/')
+ getOperatorRest()
+ }
+ case '0' =>
+ def fetchZero() = {
+ putChar(ch)
+ nextChar()
+ if (ch == 'x' || ch == 'X') {
+ nextChar()
+ base = 16
+ } else {
+ /*
+ * What should leading 0 be in the future? It is potentially dangerous
+ * to let it be base-10 because of history. Should it be an error? Is
+ * there a realistic situation where one would need it?
+ */
+ if (isDigit(ch))
+ error("Non-zero numbers may not have a leading zero.")
+ base = 10
+ }
+ getNumber()
+ }
+ fetchZero
+ case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ base = 10
+ getNumber()
+ case '`' =>
+ getBackquotedIdent()
+ case '\"' =>
+ def fetchDoubleQuote() = {
+ if (token == INTERPOLATIONID) {
+ nextRawChar()
+ if (ch == '\"') {
+ nextRawChar()
+ if (ch == '\"') {
+ nextRawChar()
+ getStringPart(multiLine = true)
+ sepRegions = STRINGPART :: sepRegions // indicate string part
+ sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
+ } else {
+ getStringPart(multiLine = false)
+ sepRegions = STRINGLIT :: sepRegions // indicate single line string part
+ }
+ } else {
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ if (ch == '\"') {
+ nextRawChar()
+ getRawStringLit()
+ } else {
+ // Exactly two quotes: the empty string literal.
+ token = STRINGLIT
+ strVal = ""
+ }
+ } else {
+ getStringLit()
+ }
+ }
+ }
+ fetchDoubleQuote
+ case '\'' =>
+ def fetchSingleQuote() = {
+ nextChar()
+ // `'x` may be a char literal or (historically) a symbol-like form;
+ // charLitOr decides after scanning the identifier/operator.
+ if (isIdentifierStart(ch))
+ charLitOr(getIdentRest)
+ else if (isOperatorPart(ch) && (ch != '\\'))
+ charLitOr(getOperatorRest)
+ else {
+ getLitChar()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ error("unclosed character literal")
+ }
+ }
+ }
+ fetchSingleQuote
+ case '.' =>
+ nextChar()
+ if ('0' <= ch && ch <= '9') {
+ putChar('.'); getFraction(); setStrVal()
+ } else {
+ token = DOT
+ }
+ case ';' =>
+ nextChar(); token = SEMI
+ case ',' =>
+ nextChar(); token = COMMA
+ case '(' =>
+ enterBlock(); nextChar(); token = LPAREN
+ case '{' =>
+ enterBlock(); nextChar(); token = LBRACE
+ case ')' =>
+ exitBlock(); nextChar(); token = RPAREN
+ case '}' =>
+ exitBlock(); nextChar(); token = RBRACE
+ case '[' =>
+ nextChar(); token = LBRACKET
+ case ']' =>
+ nextChar(); token = RBRACKET
+ case SU =>
+ if (isAtEnd) token = EOF
+ else {
+ error("illegal character")
+ nextChar()
+ }
+ case _ =>
+ def fetchOther() = {
+ if (ch == '\u21D2') {
+ nextChar(); token = ARROW
+ } else if (ch == '\u2190') {
+ nextChar(); token = LARROW
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else if (isSpecial(ch)) {
+ putChar(ch)
+ nextChar()
+ getOperatorRest()
+ } else {
+ error(f"illegal character '\\u${ch: Int}%04x'")
+ nextChar()
+ }
+ }
+ fetchOther
+ }
+ }
+
+ /** Called with `ch` on the character after an initial '/'. Skips a line
+ * ('//') or block ('/* ... */') comment and returns true; returns false
+ * when the '/' did not start a comment (the initial nextChar() has then
+ * already consumed one character).
+ */
+ private def skipComment(): Boolean = {
+ // Archive skipped characters so doc comments can be rebuilt afterwards.
+ def appendToComment(ch: Char) =
+ if (keepComments) commentBuf.append(ch)
+ // Shadows the scanner's nextChar: advances AND records the char.
+ def nextChar() = {
+ appendToComment(ch)
+ Scanner.this.nextChar()
+ }
+ // Consume a '//' comment up to (but not past) end of line / end of input.
+ def skipLine(): Unit = {
+ nextChar()
+ if ((ch != CR) && (ch != LF) && (ch != SU)) skipLine()
+ }
+ // Consume the body of a block comment, honouring nested '/* ... */'.
+ @tailrec
+ def skipComment(): Unit = {
+ if (ch == '/') {
+ nextChar()
+ if (ch == '*') nestedComment()
+ skipComment()
+ }
+ else if (ch == '*') {
+ do nextChar() while (ch == '*')
+ if (ch == '/') nextChar()
+ else skipComment()
+ }
+ else if (ch == SU) incompleteInputError("unclosed comment")
+ else { nextChar(); skipComment() }
+ }
+ def nestedComment() = { nextChar(); skipComment() }
+ val start = lastCharOffset
+ // On success: optionally wrap the buffered text into a Comment and, when
+ // it is a doc comment, attach it to the innermost open block's doc list.
+ def finishComment(): Boolean = {
+ if (keepComments) {
+ val pos = Position(start, charOffset, start)
+ val comment = Comment(pos, flushBuf(commentBuf))
+
+ if (comment.isDocComment)
+ docsPerBlockStack = (docsPerBlockStack.head :+ comment) :: docsPerBlockStack.tail
+ }
+
+ true
+ }
+ nextChar()
+ if (ch == '/') { skipLine(); finishComment() }
+ else if (ch == '*') { nextChar(); skipComment(); finishComment() }
+ else false
+ }
+
+// Identifiers ---------------------------------------------------------------
+
+ /** Scan a backquoted identifier after the opening '`'. Errors on an empty
+ * name, on the wildcard '_' (invalid inside backquotes), and on a missing
+ * closing backquote.
+ */
+ private def getBackquotedIdent(): Unit = {
+ nextChar()
+ getLitChars('`')
+ if (ch == '`') {
+ nextChar()
+ finishNamed(BACKQUOTED_IDENT)
+ if (name.length == 0)
+ error("empty quoted identifier")
+ else if (name == nme.WILDCARD)
+ error("wildcard invalid as backquoted identifier")
+ }
+ else error("unclosed quoted identifier")
+ }
+
+ /** Scan the remainder of a plain identifier: letters, digits, '$', and any
+ * Unicode identifier part. An '_' may switch into an operator suffix
+ * (e.g. `foo_+`). Terminates by calling finishNamed().
+ */
+ private def getIdentRest(): Unit = (ch: @switch) match {
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' |
+ '0' | '1' | '2' | '3' | '4' |
+ '5' | '6' | '7' | '8' | '9' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ case '_' =>
+ // After '_' the identifier may continue as an operator (`x_+`).
+ putChar(ch)
+ nextChar()
+ getIdentOrOperatorRest()
+ case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true!
+ finishNamed()
+ case _ =>
+ if (Character.isUnicodeIdentifierPart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else {
+ finishNamed()
+ }
+ }
+
+ /** Scan the remainder of a symbolic identifier. A '/' only continues the
+ * operator when it does not start a comment (skipComment() decides and,
+ * when it returns false, has already consumed the '/').
+ */
+ private def getOperatorRest(): Unit = (ch: @switch) match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '\\' =>
+ putChar(ch); nextChar(); getOperatorRest()
+ case '/' =>
+ if (skipComment()) finishNamed()
+ else { putChar('/'); getOperatorRest() }
+ case _ =>
+ if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() }
+ else finishNamed()
+ }
+
+ /** After an '_' inside an identifier, decide whether what follows is more
+ * identifier (letter/digit), an operator suffix, or the end of the name.
+ */
+ private def getIdentOrOperatorRest(): Unit = {
+ if (isIdentifierPart(ch))
+ getIdentRest()
+ else ch match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '\\' | '/' =>
+ getOperatorRest()
+ case _ =>
+ if (isSpecial(ch)) getOperatorRest()
+ else finishNamed()
+ }
+ }
+
+
+// Literals -----------------------------------------------------------------
+
+ /** Scan the remainder of a single-line string literal (opening '"' already
+ * consumed). On success sets strVal and token = STRINGLIT; otherwise
+ * reports an unclosed string literal.
+ */
+ private def getStringLit() = {
+ getLitChars('"')
+ if (ch != '"') error("unclosed string literal")
+ else {
+ setStrVal()
+ nextChar()
+ token = STRINGLIT
+ }
+ }
+
+ /** Scan the body of a raw (triple-quoted) string literal. All self-calls
+ * are in tail position, so mark @tailrec to guarantee constant stack use
+ * on long literals, matching skipComment/getStringPart in this file.
+ */
+ @tailrec private def getRawStringLit(): Unit = {
+ if (ch == '\"') {
+ nextRawChar()
+ if (isTripleQuote()) {
+ // Closing """ found: the buffered chars become the literal's value.
+ setStrVal()
+ token = STRINGLIT
+ } else
+ getRawStringLit()
+ } else if (ch == SU) {
+ incompleteInputError("unclosed multi-line string literal")
+ } else {
+ // Raw scanning: no escape processing, keep the char verbatim.
+ putChar(ch)
+ nextRawChar()
+ getRawStringLit()
+ }
+ }
+
+ /** Scan one segment of an interpolated string literal, stopping either at
+ * the closing quote(s) (token = STRINGLIT) or at an interpolation hole
+ * `$ident` / `${...}` (token = STRINGPART, with the follow-up token
+ * prepared in `next`).
+ */
+ @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ // Emit the text scanned so far as STRINGPART and position `next` so the
+ // interpolated identifier/expression starts at the current offset.
+ def finishStringPart() = {
+ setStrVal()
+ token = STRINGPART
+ next.lastOffset = charOffset - 1
+ next.offset = charOffset - 1
+ }
+ if (ch == '"') {
+ if (multiLine) {
+ nextRawChar()
+ if (isTripleQuote()) {
+ setStrVal()
+ token = STRINGLIT
+ } else
+ getStringPart(multiLine)
+ } else {
+ nextChar()
+ setStrVal()
+ token = STRINGLIT
+ }
+ } else if (ch == '$') {
+ nextRawChar()
+ if (ch == '$') {
+ // "$$" is an escaped literal '$'.
+ putChar(ch)
+ nextRawChar()
+ getStringPart(multiLine)
+ } else if (ch == '{') {
+ // "${" opens a block expression hole; hand LBRACE to the parser.
+ finishStringPart()
+ nextRawChar()
+ next.token = LBRACE
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ // "$ident": scan the identifier into the lookahead token.
+ finishStringPart()
+ do {
+ putChar(ch)
+ nextRawChar()
+ } while (ch != SU && Character.isUnicodeIdentifierPart(ch))
+ finishNamed(target = next)
+ } else {
+ error("invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected")
+ }
+ } else {
+ // Single-line parts end at CR/LF; any part ends at end-of-input,
+ // unless the char came from a unicode escape.
+ val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
+ if (isUnclosedLiteral) {
+ if (multiLine)
+ incompleteInputError("unclosed multi-line string literal")
+ else
+ error("unclosed string literal")
+ }
+ else {
+ putChar(ch)
+ nextRawChar()
+ getStringPart(multiLine)
+ }
+ }
+ }
+
+ /** Resume scanning inside an interpolated string: mark where this token
+ * starts and continue with the single- or multi-line part scanner.
+ */
+ private def fetchStringPart() = {
+ val multi = inMultiLineInterpolation
+ offset = charOffset - 1
+ getStringPart(multiLine = multi)
+ }
+
+ /** Called with one '"' already consumed. Returns true if two more follow,
+ * i.e. the literal is closed by `"""`; any extra quotes beyond the third
+ * are treated as content. On false, re-buffers the quotes already seen.
+ */
+ private def isTripleQuote(): Boolean =
+ if (ch == '"') {
+ nextRawChar()
+ if (ch == '"') {
+ nextChar()
+ // More than three quotes: trailing ones belong to the string value.
+ while (ch == '"') {
+ putChar('"')
+ nextChar()
+ }
+ true
+ } else {
+ // Only two quotes seen: they are literal content, not a terminator.
+ putChar('"')
+ putChar('"')
+ false
+ }
+ } else {
+ // Only one quote seen: literal content.
+ putChar('"')
+ false
+ }
+
+ /** copy current character into litBuf, interpreting any escape sequences,
+ * and advance to next character.
+ *
+ * Handles octal escapes `\0`..`\377` (up to three digits, first digit at
+ * most '3' for a three-digit form) and the standard single-char escapes;
+ * anything else after '\\' is reported via invalidEscape().
+ */
+ protected def getLitChar(): Unit =
+ if (ch == '\\') {
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ // Octal escape: accumulate up to three octal digits.
+ val leadch: Char = ch
+ var oct: Int = digit2int(ch, 8)
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ // Third digit only if the value stays within a byte (lead <= '3').
+ if (leadch <= '3' && '0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ }
+ }
+ putChar(oct.toChar)
+ } else {
+ ch match {
+ case 'b' => putChar('\b')
+ case 't' => putChar('\t')
+ case 'n' => putChar('\n')
+ case 'f' => putChar('\f')
+ case 'r' => putChar('\r')
+ case '\"' => putChar('\"')
+ case '\'' => putChar('\'')
+ case '\\' => putChar('\\')
+ case _ => invalidEscape()
+ }
+ nextChar()
+ }
+ } else {
+ putChar(ch)
+ nextChar()
+ }
+
+ /** Report an invalid escape sequence (error position is the preceding
+ * backslash) and keep the offending character as literal content.
+ */
+ protected def invalidEscape(): Unit = {
+ error("invalid escape character", charOffset - 1)
+ putChar(ch)
+ }
+
+ /** Buffer characters via getLitChar() until the delimiter, end of input,
+ * or a raw line break (SU/CR/LF end the scan unless the char came from a
+ * unicode escape).
+ */
+ private def getLitChars(delimiter: Char) = {
+ while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape))
+ getLitChar()
+ }
+
+ /** read fractional part and exponent of floating point number
+ * if one is present.
+ *
+ * An exponent is only consumed when a lookahead confirms it is followed by
+ * digits (optionally signed), so `5e` stays an identifier-adjacent error
+ * case rather than being half-consumed. A trailing d/D forces DOUBLELIT,
+ * f/F forces FLOATLIT; otherwise the token stays DOUBLELIT.
+ */
+ protected def getFraction(): Unit = {
+ token = DOUBLELIT
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ if (ch == 'e' || ch == 'E') {
+ // Peek past the exponent marker and optional sign before committing.
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '+' || lookahead.ch == '-') {
+ lookahead.nextChar()
+ }
+ if ('0' <= lookahead.ch && lookahead.ch <= '9') {
+ putChar(ch)
+ nextChar()
+ if (ch == '+' || ch == '-') {
+ putChar(ch)
+ nextChar()
+ }
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ }
+ token = DOUBLELIT
+ }
+ if (ch == 'd' || ch == 'D') {
+ putChar(ch)
+ nextChar()
+ token = DOUBLELIT
+ } else if (ch == 'f' || ch == 'F') {
+ putChar(ch)
+ nextChar()
+ token = FLOATLIT
+ }
+ checkNoLetter()
+ }
+ /** Reject an identifier character glued onto a number literal (e.g.
+ * `123abc`); characters below ' ' are exempt.
+ */
+ def checkNoLetter(): Unit =
+ if (ch >= ' ' && isIdentifierPart(ch)) error("Invalid literal number")
+
+ /** Read a number into strVal and set base
+ *
+ * Scans digits in the current base, then disambiguates what follows: a
+ * '.' may start a fraction or a method call on an Int (decided by
+ * lookahead), e/E/f/F/d/D may start a floating suffix, and l/L makes a
+ * LONGLIT.
+ */
+ protected def getNumber(): Unit = {
+ while (digit2int(ch, base) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ token = INTLIT
+ if (base == 10 && ch == '.') {
+ val isDefinitelyNumber = {
+ val lookahead = lookaheadReader
+ val c = lookahead.getc()
+ (c: @switch) match {
+ /** Another digit is a giveaway. */
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ true
+
+ /** Backquoted idents like 22.`foo`. */
+ case '`' =>
+ false
+
+ /** These letters may be part of a literal, or a method invocation on an Int.
+ */
+ case 'd' | 'D' | 'f' | 'F' =>
+ !isIdentifierPart(lookahead.getc())
+
+ /** A little more special handling for e.g. 5e7 */
+ case 'e' | 'E' =>
+ val ch = lookahead.getc()
+ !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
+
+ case x =>
+ !isIdentifierStart(x)
+ }
+ }
+ if (isDefinitelyNumber) {
+ putChar(ch)
+ nextChar()
+ getFraction()
+ }
+ } else (ch: @switch) match {
+ case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' =>
+ // Floating suffixes only apply to decimal literals, not hex.
+ if (base == 10) getFraction()
+ case 'l' | 'L' =>
+ nextChar()
+ token = LONGLIT
+ case _ =>
+ }
+ setStrVal()
+ }
+
+ /** Parse character literal if current character is followed by \',
+ * or follow with given op and return a symbol literal token
+ *
+ * Called after a single quote: either the next char closes a CHARLIT
+ * (e.g. 'x'), or `op` continues scanning a name and the whole thing is a
+ * SYMBOLLIT (e.g. 'foo).
+ */
+ def charLitOr(op: () => Unit): Unit = {
+ putChar(ch)
+ nextChar()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ op()
+ token = SYMBOLLIT
+ strVal = name.toString
+ }
+ }
+ /** Detailed display of the current token; identifier and literal tokens
+ * also show the scanned name.
+ */
+ override def toString = {
+ val detail = showTokenDetailed(token)
+ val suffix =
+ if (identifierTokens.contains(token) || literalTokens.contains(token)) " " + name
+ else ""
+ detail + suffix
+ }
+
+ /** Compact display of the current token for debugging/tracing; literal
+ * tokens include their scanned value.
+ */
+ def show: String = token match {
+ case IDENTIFIER | BACKQUOTED_IDENT => s"id($name)"
+ case CHARLIT => s"char($intVal)"
+ case INTLIT => s"int($intVal)"
+ case LONGLIT => s"long($intVal)"
+ case FLOATLIT => s"float($floatVal)"
+ case DOUBLELIT => s"double($floatVal)"
+ case STRINGLIT => s"string($strVal)"
+ case STRINGPART => s"stringpart($strVal)"
+ case INTERPOLATIONID => s"interpolationid($name)"
+ case SEMI => ";"
+ case NEWLINE => ";"
+ case NEWLINES => ";;"
+ case COMMA => ","
+ case _ => showToken(token)
+ }
+
+// (does not seem to be needed) def flush = { charOffset = offset; nextChar(); this }
+
+ /** Resume normal scanning after XML content, re-installing `lastToken` as
+ * the current token. A pending lookahead token here indicates input ended
+ * inside the XML block (unless errors were already reported).
+ */
+ def resume(lastToken: Token) = {
+ token = lastToken
+ if (next.token != EMPTY && !ctx.reporter.hasErrors)
+ error("unexpected end of input: possible missing '}' in XML block")
+
+ nextToken()
+ }
+
+ /* Initialization: read first char, then first token */
+ nextChar()
+ nextToken()
+ } // end Scanner
+
+ // ------------- keyword configuration -----------------------------------
+
+ val (lastKeywordStart, kwArray) = buildKeywordArray(keywords)
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
new file mode 100644
index 000000000..afa7fefab
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
@@ -0,0 +1,145 @@
+package dotty.tools
+package dotc
+package parsing
+
+import util.SourceFile
+import core._
+import Contexts._
+import Parsers._
+
+
+/** <p>Performs the following context-free rewritings:</p>
+ * <ol>
+ * <li>
+ * Places all pattern variables in Bind nodes. In a pattern, for
+ * identifiers <code>x</code>:<pre>
+ * x => x @ _
+ * x:T => x @ (_ : T)</pre>
+ * </li>
+ * <li>Removes pattern definitions (PatDef's) as follows:
+ * If pattern is a simple (typed) identifier:<pre>
+ * <b>val</b> x = e ==> <b>val</b> x = e
+ * <b>val</b> x: T = e ==> <b>val</b> x: T = e</pre>
+ *
+ * if there are no variables in pattern<pre>
+ * <b>val</b> p = e ==> e match (case p => ())</pre>
+ *
+ * if there is exactly one variable in pattern<pre>
+ * <b>val</b> x_1 = e <b>match</b> (case p => (x_1))</pre>
+ *
+ * if there is more than one variable in pattern<pre>
+ * <b>val</b> p = e ==> <b>private synthetic val</b> t$ = e <b>match</b> (case p => (x_1, ..., x_N))
+ * <b>val</b> x_1 = t$._1
+ * ...
+ * <b>val</b> x_N = t$._N</pre>
+ * </li>
+ * <li>
+ * Removes function types as follows:<pre>
+ * (argtpes) => restpe ==> scala.Function_n[argtpes, restpe]</pre>
+ * </li>
+ * <li>
+ * Wraps naked case definitions in a match as follows:<pre>
+ * { cases } ==> (x => x.match {cases})<span style="font-family:normal;">, except when already argument to match</span></pre>
+ * </li>
+ * </ol>
+ */
+object ScriptParsers {
+
+ import ast.untpd._
+
+ // NOTE(review): script parsing is currently disabled — parse() aborts with
+ // `unsupported`. The commented block below is the scalac implementation
+ // kept as a porting reference (see the TODO).
+ class ScriptParser(source: SourceFile)(implicit ctx: Context) extends Parser(source) {
+
+ /** This is the parse entry point for code which is not self-contained, e.g.
+ * a script which is a series of template statements. They will be
+ * swaddled in Trees until the AST is equivalent to the one returned
+ * by compilationUnit().
+ */
+ override def parse(): Tree = unsupported("parse")
+ /* TODO: reinstantiate
+ val stmts = templateStatSeq(false)._2
+ accept(EOF)
+
+ def mainModuleName = ctx.settings.script.value
+
+ /** If there is only a single object template in the file and it has a
+ * suitable main method, we will use it rather than building another object
+ * around it. Since objects are loaded lazily the whole script would have
+ * been a no-op, so we're not taking much liberty.
+ */
+ def searchForMain(): Option[Tree] = {
+ /** Have to be fairly liberal about what constitutes a main method since
+ * nothing has been typed yet - for instance we can't assume the parameter
+ * type will look exactly like "Array[String]" as it could have been renamed
+ * via import, etc.
+ */
+ def isMainMethod(t: Tree) = t match {
+ case DefDef(_, nme.main, Nil, List(_), _, _) => true
+ case _ => false
+ }
+ /** For now we require there only be one top level object. */
+ var seenModule = false
+ val newStmts = stmts collect {
+ case t @ Import(_, _) => t
+ case md @ ModuleDef(mods, name, template)
+ if !seenModule && (template.body exists isMainMethod) =>
+ seenModule = true
+ /** This slightly hacky situation arises because we have no way to communicate
+ * back to the scriptrunner what the name of the program is. Even if we were
+ * willing to take the sketchy route of settings.script.value = progName, that
+ * does not work when using fsc. And to find out in advance would impose a
+ * whole additional parse. So instead, if the actual object's name differs from
+ * what the script is expecting, we transform it to match.
+ */
+ md.derivedModuleDef(mods, mainModuleName.toTermName, template)
+ case _ =>
+ /** If we see anything but the above, fail. */
+ return None
+ }
+ Some(makePackaging(0, emptyPkg, newStmts))
+ }
+
+ if (mainModuleName == ScriptRunner.defaultScriptMain)
+ searchForMain() foreach { return _ }
+
+ /** Here we are building an AST representing the following source fiction,
+ * where <moduleName> is from -Xscript (defaults to "Main") and <stmts> are
+ * the result of parsing the script file.
+ *
+ * object <moduleName> {
+ * def main(argv: Array[String]): Unit = {
+ * val args = argv
+ * new AnyRef {
+ * <stmts>
+ * }
+ * }
+ * }
+ */
+ import definitions._
+
+ def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
+ def emptyInit = DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ Nil,
+ List(Nil),
+ TypeTree(),
+ Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ )
+
+ // def main
+ def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
+ def mainParameter = List(ValDef(Modifiers(Param), "argv", mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(Modifiers(), "args", TypeTree(), Ident("argv")))
+ def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
+ def mainDef = DefDef(Modifiers(), nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
+
+ // object Main
+ def moduleName = ScriptRunner scriptMain settings
+ def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
+ def moduleDef = ModuleDef(Modifiers(), moduleName, moduleBody)
+
+ // package <empty> { ... }
+ makePackaging(0, emptyPkg, List(moduleDef))
+ }*/
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
new file mode 100644
index 000000000..20b655a19
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
@@ -0,0 +1,264 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable
+import scala.xml.{ EntityRef, Text }
+import core._
+import Flags.Mutable
+import Names._, StdNames._, ast.Trees._, ast.{tpd, untpd}
+import Symbols._, Contexts._
+import util.Positions._
+import Parsers.Parser
+import scala.reflect.internal.util.StringOps.splitWhere
+import scala.language.implicitConversions
+
+/** This class builds instance of `Tree` that represent XML.
+ *
+ * Note from martin: This needs to have its position info reworked. I don't
+ * understand exactly what's done here. To make validation pass, I set many
+ * positions to be transparent. Not sure this is a good idea for navigating
+ * XML trees in the IDE but it's the best I can do right now. If someone
+ * who understands this part better wants to give it a shot, please do!
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Context) {
+
+ import Constants.Constant
+ import untpd._
+
+ import parser.atPos
+
+ // Set by the caller before building: true when building XML patterns,
+ // false when building XML expressions.
+ private[parsing] var isPattern: Boolean = _
+
+ // Names of the scala.xml types referenced by generated trees.
+ private object xmltypes extends ScalaTypeNames {
+ val _Comment: TypeName = "Comment"
+ val _Elem: TypeName = "Elem"
+ val _EntityRef: TypeName = "EntityRef"
+ val _Group: TypeName = "Group"
+ val _MetaData: TypeName = "MetaData"
+ val _NamespaceBinding: TypeName = "NamespaceBinding"
+ val _NodeBuffer: TypeName = "NodeBuffer"
+ val _PrefixedAttribute: TypeName = "PrefixedAttribute"
+ val _ProcInstr: TypeName = "ProcInstr"
+ val _Text: TypeName = "Text"
+ val _Unparsed: TypeName = "Unparsed"
+ val _UnprefixedAttribute: TypeName = "UnprefixedAttribute"
+ }
+
+ // Term names used in generated code ($-prefixed ones are synthetic locals).
+ private object xmlterms extends ScalaTermNames {
+ val _Null: TermName = "Null"
+ val __Elem: TermName = "Elem"
+ val __Text: TermName = "Text"
+ val _buf: TermName = "$buf"
+ val _md: TermName = "$md"
+ val _plus: TermName = "$amp$plus"
+ val _scope: TermName = "$scope"
+ val _tmpscope: TermName = "$tmpscope"
+ val _xml: TermName = "xml"
+ }
+
+ import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
+ _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute}
+
+ import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
+
+ // convenience methods
+ private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
+ private def const(x: Any) = Literal(Constant(x))
+ private def wild = Ident(nme.WILDCARD)
+ private def wildStar = Ident(tpnme.WILDCARD_STAR)
+ private def _scala(name: Name) = scalaDot(name)
+ private def _scala_xml(name: Name) = Select(_scala(_xml), name)
+
+ // Selections of the scala.xml members used below.
+ private def _scala_xml_Comment = _scala_xml(_Comment)
+ private def _scala_xml_Elem = _scala_xml(_Elem)
+ private def _scala_xml_EntityRef = _scala_xml(_EntityRef)
+ private def _scala_xml_Group = _scala_xml(_Group)
+ private def _scala_xml_MetaData = _scala_xml(_MetaData)
+ private def _scala_xml_NamespaceBinding = _scala_xml(_NamespaceBinding)
+ private def _scala_xml_NodeBuffer = _scala_xml(_NodeBuffer)
+ private def _scala_xml_Null = _scala_xml(_Null)
+ private def _scala_xml_PrefixedAttribute = _scala_xml(_PrefixedAttribute)
+ private def _scala_xml_ProcInstr = _scala_xml(_ProcInstr)
+ private def _scala_xml_Text = _scala_xml(_Text)
+ private def _scala_xml_Unparsed = _scala_xml(_Unparsed)
+ private def _scala_xml_UnprefixedAttribute= _scala_xml(_UnprefixedAttribute)
+ private def _scala_xml__Elem = _scala_xml(__Elem)
+ private def _scala_xml__Text = _scala_xml(__Text)
+
+ /** Build an element tree: an `Elem(...)` extractor pattern when isPattern,
+ * otherwise a `new Elem(...)` construction (children passed as a vararg
+ * sequence argument).
+ */
+ protected def mkXML(
+ pos: Position,
+ isPattern: Boolean,
+ pre: Tree,
+ label: Tree,
+ attrs: Tree,
+ scope: Tree,
+ empty: Boolean,
+ children: Seq[Tree]): Tree =
+ {
+ def starArgs =
+ if (children.isEmpty) Nil
+ else List(Typed(makeXMLseq(pos, children), wildStar))
+
+ def pat = Apply(_scala_xml__Elem, List(pre, label, wild, wild) ::: convertToTextPat(children))
+ def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope, if (empty) Literal(Constant(true)) else Literal(Constant(false))) ::: starArgs))
+
+ atPos(pos) { if (isPattern) pat else nonpat }
+ }
+
+ final def entityRef(pos: Position, n: String) =
+ atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) )
+
+ // create scala.xml.Text here <: scala.xml.Node
+ final def text(pos: Position, txt: String): Tree = atPos(pos) {
+ if (isPattern) makeTextPat(const(txt))
+ else makeText1(const(txt))
+ }
+
+ def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
+ def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt))
+ def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) )
+ def charData(pos: Position, txt: String) = atPos(pos)( makeText1(const(txt)) )
+
+ def procInstr(pos: Position, target: String, txt: String) =
+ atPos(pos)( ProcInstr(const(target), const(txt)) )
+
+ protected def Comment(txt: Tree) = New(_scala_xml_Comment, LL(txt))
+ protected def ProcInstr(target: Tree, txt: Tree) = New(_scala_xml_ProcInstr, LL(target, txt))
+
+ /** @todo: attributes */
+ def makeXMLpat(pos: Position, n: String, args: Seq[Tree]): Tree = {
+ val (prepat, labpat) = splitPrefix(n) match {
+ case (Some(pre), rest) => (const(pre), const(rest))
+ case _ => (wild, const(n))
+ }
+ mkXML(pos, true, prepat, labpat, null, null, false, args)
+ }
+
+ protected def convertToTextPat(t: Tree): Tree = t match {
+ case _: Literal => makeTextPat(t)
+ case _ => t
+ }
+ protected def convertToTextPat(buf: Seq[Tree]): List[Tree] =
+ (buf map convertToTextPat).toList
+
+ // Parse an attribute value, expanding entity references; a multi-part
+ // value becomes a node sequence, an empty one becomes Nil.
+ def parseAttribute(pos: Position, s: String): Tree = {
+ val ts = scala.xml.Utility.parseAttributeValue(s) map {
+ case Text(s) => text(pos, s)
+ case EntityRef(s) => entityRef(pos, s)
+ }
+ ts.length match {
+ case 0 => TypedSplice(tpd.ref(defn.NilModule) withPos pos)
+ case 1 => ts.head
+ case _ => makeXMLseq(pos, ts.toList)
+ }
+ }
+
+ def isEmptyText(t: Tree) = t match {
+ case Literal(Constant("")) => true
+ case _ => false
+ }
+
+ /** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. */
+ def makeXMLseq(pos: Position, args: Seq[Tree]) = {
+ // Generates: { val $buf = new NodeBuffer; $buf &+ arg1; ...; $buf }
+ val buffer = ValDef(_buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil))
+ val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t)))
+
+ atPos(pos)( Block(buffer :: applies.toList, Ident(_buf)) )
+ }
+
+ /** Returns (Some(prefix) | None, rest) based on position of ':' */
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ case Some((pre, rest)) => (Some(pre), rest)
+ case _ => (None, name)
+ }
+
+ /** Various node constructions. */
+ def group(pos: Position, args: Seq[Tree]): Tree =
+ atPos(pos)( New(_scala_xml_Group, LL(makeXMLseq(pos, args))) )
+
+ def unparsed(pos: Position, str: String): Tree =
+ atPos(pos)( New(_scala_xml_Unparsed, LL(const(str))) )
+
+ /** Build the full tree for one element: namespace bindings ($tmpscope/
+ * $scope), attribute metadata ($md), then the Elem itself, wrapped in
+ * nested blocks so the synthetic locals stay scoped.
+ */
+ def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = {
+ // Turn one xmlns attribute into a $tmpscope = new NamespaceBinding(...)
+ // assignment and drop it from the attribute map.
+ def handleNamespaceBinding(pre: String, z: String): Tree = {
+ def mkAssign(t: Tree): Tree = Assign(
+ Ident(_tmpscope),
+ New(_scala_xml_NamespaceBinding, LL(const(pre), t, Ident(_tmpscope)))
+ )
+
+ val uri1 = attrMap(z) match {
+ case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
+ case x => mkAssign(x)
+ }
+ attrMap -= z
+ uri1
+ }
+
+ /** Extract all the namespaces from the attribute map. */
+ val namespaces: List[Tree] =
+ for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield {
+ val ns = splitPrefix(z) match {
+ case (Some(_), rest) => rest
+ case _ => null
+ }
+ handleNamespaceBinding(ns, z)
+ }
+
+ val (pre, newlabel) = splitPrefix(qname) match {
+ case (Some(p), x) => (p, x)
+ case (None, x) => (null, x)
+ }
+
+ // $md = new (Un)PrefixedAttribute(key, value, $md) — chained metadata.
+ def mkAttributeTree(pre: String, key: String, value: Tree) = atPos(pos.toSynthetic) {
+ // XXX this is where we'd like to put Select(value, nme.toString_) for #1787
+ // after we resolve the Some(foo) situation.
+ val baseArgs = List(const(key), value, Ident(_md))
+ val (clazz, attrArgs) =
+ if (pre == null) (_scala_xml_UnprefixedAttribute, baseArgs)
+ else (_scala_xml_PrefixedAttribute , const(pre) :: baseArgs)
+
+ Assign(Ident(_md), New(clazz, LL(attrArgs: _*)))
+ }
+
+ def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value)
+ def handleUnprefixedAttribute(key: String, value: Tree) = mkAttributeTree(null, key, value)
+
+ val attributes: List[Tree] =
+ for ((k, v) <- attrMap.toList.reverse) yield splitPrefix(k) match {
+ case (Some(pre), rest) => handlePrefixedAttribute(pre, rest, v)
+ case _ => handleUnprefixedAttribute(k, v)
+ }
+
+ lazy val scopeDef = ValDef(_scope, _scala_xml_NamespaceBinding, Ident(_tmpscope))
+ lazy val tmpScopeDef = ValDef(_tmpscope, _scala_xml_NamespaceBinding, Ident(_scope)).withFlags(Mutable)
+ lazy val metadataDef = ValDef(_md, _scala_xml_MetaData, _scala_xml_Null).withFlags(Mutable)
+ val makeSymbolicAttrs = if (!attributes.isEmpty) Ident(_md) else _scala_xml_Null
+
+ // Only emit the synthetic locals that are actually needed.
+ val (attrResult, nsResult) =
+ (attributes.isEmpty, namespaces.isEmpty) match {
+ case (true , true) => (Nil, Nil)
+ case (true , false) => (scopeDef :: Nil, tmpScopeDef :: namespaces)
+ case (false, true) => (metadataDef :: attributes, Nil)
+ case (false, false) => (scopeDef :: metadataDef :: attributes, tmpScopeDef :: namespaces)
+ }
+
+ val body = mkXML(
+ pos.toSynthetic,
+ false,
+ const(pre),
+ const(newlabel),
+ makeSymbolicAttrs,
+ Ident(_scope),
+ empty,
+ args
+ )
+
+ atPos(pos.toSynthetic)( Block(nsResult, Block(attrResult, body)) )
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
new file mode 100644
index 000000000..5324207db
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -0,0 +1,238 @@
+package dotty.tools
+package dotc
+package parsing
+
+import collection.immutable.BitSet
+import core.Decorators._
+
+/** Token ids and display strings shared by the Scala tokenizer and any
+ *  sibling tokenizers. Subclasses fix `maxToken` and the `keywords` set;
+ *  commented-out entries are defined (with the same ids) in a subclass.
+ */
+abstract class TokensCommon {
+ val maxToken: Int
+
+ type Token = Int
+ type TokenSet = BitSet
+
+ // Inclusive id range [lo, hi] as a bit set.
+ def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi: _*)
+
+ def showTokenDetailed(token: Int) = debugString(token)
+
+ // User-facing rendering: keywords are shown quoted, other tokens by name.
+ def showToken(token: Int) = {
+ val str = tokenString(token)
+ if (keywords contains token) s"'$str'" else str
+ }
+
+ // Indexed by token id; populated by `enter` as each constant below is defined.
+ val tokenString, debugString = new Array[String](maxToken + 1)
+
+ /** Registers the display string (and optional debug string) for `token`.
+  *  The assert guards against two tokens claiming the same id.
+  */
+ def enter(token: Int, str: String, debugStr: String = ""): Unit = {
+ assert(tokenString(token) == null)
+ tokenString(token) = str
+ debugString(token) = if (debugStr.isEmpty) str else debugStr
+ }
+
+ /** special tokens */
+ final val EMPTY = 0; enter(EMPTY, "<empty>") // a missing token, used in lookahead
+ final val ERROR = 1; enter(ERROR, "erroneous token") // an erroneous token
+ final val EOF = 2; enter(EOF, "eof")
+
+ /** literals */
+ final val CHARLIT = 3; enter(CHARLIT, "character literal")
+ final val INTLIT = 4; enter(INTLIT, "integer literal")
+ final val LONGLIT = 5; enter(LONGLIT, "long literal")
+ final val FLOATLIT = 6; enter(FLOATLIT, "float literal")
+ final val DOUBLELIT = 7; enter(DOUBLELIT, "double literal")
+ final val STRINGLIT = 8; enter(STRINGLIT, "string literal")
+ final val STRINGPART = 9; enter(STRINGPART, "string literal", "string literal part")
+ //final val INTERPOLATIONID = 10; enter(INTERPOLATIONID, "string interpolator")
+ //final val SYMBOLLIT = 11; enter(SYMBOLLIT, "symbol literal") // TODO: deprecate
+
+ /** identifiers */
+ final val IDENTIFIER = 12; enter(IDENTIFIER, "identifier")
+ //final val BACKQUOTED_IDENT = 13; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
+
+ /** alphabetic keywords */
+ final val IF = 20; enter(IF, "if")
+ final val FOR = 21; enter(FOR, "for")
+ final val ELSE = 22; enter(ELSE, "else")
+ final val THIS = 23; enter(THIS, "this")
+ final val NULL = 24; enter(NULL, "null")
+ final val NEW = 25; enter(NEW, "new")
+ //final val WITH = 26; enter(WITH, "with")
+ final val SUPER = 27; enter(SUPER, "super")
+ //final val CASE = 28; enter(CASE, "case")
+ //final val CASECLASS = 29; enter(CASECLASS, "case class")
+ //final val CASEOBJECT = 30; enter(CASEOBJECT, "case object")
+ //final val VAL = 31; enter(VAL, "val")
+ final val ABSTRACT = 32; enter(ABSTRACT, "abstract")
+ final val FINAL = 33; enter(FINAL, "final")
+ final val PRIVATE = 34; enter(PRIVATE, "private")
+ final val PROTECTED = 35; enter(PROTECTED, "protected")
+ final val OVERRIDE = 36; enter(OVERRIDE, "override")
+ //final val IMPLICIT = 37; enter(IMPLICIT, "implicit")
+ //final val VAR = 38; enter(VAR, "var")
+ //final val DEF = 39; enter(DEF, "def")
+ //final val TYPE = 40; enter(TYPE, "type")
+ final val EXTENDS = 41; enter(EXTENDS, "extends")
+ final val TRUE = 42; enter(TRUE, "true")
+ final val FALSE = 43; enter(FALSE, "false")
+ //final val OBJECT = 44; enter(OBJECT, "object")
+ final val CLASS = 45; enter(CLASS, "class")
+ final val IMPORT = 46; enter(IMPORT, "import")
+ final val PACKAGE = 47; enter(PACKAGE, "package")
+ //final val YIELD = 48; enter(YIELD, "yield")
+ final val DO = 49; enter(DO, "do")
+ //final val TRAIT = 50; enter(TRAIT, "trait")
+ //final val SEALED = 51; enter(SEALED, "sealed")
+ final val THROW = 52; enter(THROW, "throw")
+ final val TRY = 53; enter(TRY, "try")
+ final val CATCH = 54; enter(CATCH, "catch")
+ final val FINALLY = 55; enter(FINALLY, "finally")
+ final val WHILE = 56; enter(WHILE, "while")
+ final val RETURN = 57; enter(RETURN, "return")
+ //final val MATCH = 58; enter(MATCH, "match")
+ //final val LAZY = 59; enter(LAZY, "lazy")
+ //final val THEN = 60; enter(THEN, "then")
+ //final val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
+ //final val INLINE = 62; enter(INLINE, "inline")
+
+ /** special symbols */
+ final val COMMA = 70; enter(COMMA, "','")
+ final val SEMI = 71; enter(SEMI, "';'")
+ final val DOT = 72; enter(DOT, "'.'")
+ //final val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
+ //final val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines")
+
+ /** special keywords */
+ //final val USCORE = 73; enter(USCORE, "_")
+ final val COLON = 74; enter(COLON, ":")
+ final val EQUALS = 75; enter(EQUALS, "=")
+ //final val LARROW = 76; enter(LARROW, "<-")
+ //final val ARROW = 77; enter(ARROW, "=>")
+ //final val SUBTYPE = 80; enter(SUBTYPE, "<:")
+ //final val SUPERTYPE = 81; enter(SUPERTYPE, ">:")
+ //final val HASH = 82; enter(HASH, "#")
+ final val AT = 83; enter(AT, "@")
+ //final val VIEWBOUND = 84; enter(VIEWBOUND, "<%") // TODO: deprecate
+
+ val keywords: TokenSet
+
+ /** parentheses */
+ final val LPAREN = 90; enter(LPAREN, "'('")
+ final val RPAREN = 91; enter(RPAREN, "')'")
+ final val LBRACKET = 92; enter(LBRACKET, "'['")
+ final val RBRACKET = 93; enter(RBRACKET, "']'")
+ final val LBRACE = 94; enter(LBRACE, "'{'")
+ final val RBRACE = 95; enter(RBRACE, "'}'")
+
+ final val firstParen = LPAREN
+ final val lastParen = RBRACE
+
+ /** Builds a lookup table from a keyword's first name-table offset to its
+  *  token id; non-keyword slots default to IDENTIFIER. Keywords whose
+  *  display string contains a space (multi-word pseudo-keywords) or is null
+  *  are excluded. Returns (largest used offset, table).
+  */
+ def buildKeywordArray(keywords: TokenSet) = {
+ def start(tok: Token) = tokenString(tok).toTermName.start
+ def sourceKeywords = keywords.toList.filter { (kw: Token) =>
+ val ts = tokenString(kw)
+ (ts != null) && !ts.contains(' ')
+ }
+
+ val lastKeywordStart = sourceKeywords.map(start).max
+
+ val arr = Array.fill(lastKeywordStart + 1)(IDENTIFIER)
+ for (kw <- sourceKeywords) arr(start(kw)) = kw
+ (lastKeywordStart, arr)
+ }
+}
+
+/** The full Scala token set: adds the Scala-only tokens that TokensCommon
+ *  leaves commented out, plus the derived token-class sets used by the parser.
+ */
+object Tokens extends TokensCommon {
+ final val minToken = EMPTY
+ // Forward reference to the constant XMLSTART (= 96) defined below; legal
+ // because both are literal-typed final vals.
+ final val maxToken = XMLSTART
+
+ final val INTERPOLATIONID = 10; enter(INTERPOLATIONID, "string interpolator")
+ final val SYMBOLLIT = 11; enter(SYMBOLLIT, "symbol literal") // TODO: deprecate
+
+ final val BACKQUOTED_IDENT = 13; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
+
+ final val identifierTokens = BitSet(IDENTIFIER, BACKQUOTED_IDENT)
+
+ // Relies on IDENTIFIER (12) and BACKQUOTED_IDENT (13) being adjacent ids.
+ def isIdentifier(token : Int) =
+ token >= IDENTIFIER && token <= BACKQUOTED_IDENT
+
+ /** alphabetic keywords */
+ final val WITH = 26; enter(WITH, "with")
+ final val CASE = 28; enter(CASE, "case")
+ final val CASECLASS = 29; enter(CASECLASS, "case class")
+ final val CASEOBJECT = 30; enter(CASEOBJECT, "case object")
+ final val VAL = 31; enter(VAL, "val")
+ final val IMPLICIT = 37; enter(IMPLICIT, "implicit")
+ final val VAR = 38; enter(VAR, "var")
+ final val DEF = 39; enter(DEF, "def")
+ final val TYPE = 40; enter(TYPE, "type")
+ final val OBJECT = 44; enter(OBJECT, "object")
+ final val YIELD = 48; enter(YIELD, "yield")
+ final val TRAIT = 50; enter(TRAIT, "trait")
+ final val SEALED = 51; enter(SEALED, "sealed")
+ final val MATCH = 58; enter(MATCH, "match")
+ final val LAZY = 59; enter(LAZY, "lazy")
+ final val THEN = 60; enter(THEN, "then")
+ final val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
+ final val INLINE = 62; enter(INLINE, "inline")
+
+ /** special symbols */
+ final val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
+ final val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines")
+
+ /** special keywords */
+ final val USCORE = 73; enter(USCORE, "_")
+ final val LARROW = 76; enter(LARROW, "<-")
+ final val ARROW = 77; enter(ARROW, "=>")
+ final val SUBTYPE = 80; enter(SUBTYPE, "<:")
+ final val SUPERTYPE = 81; enter(SUPERTYPE, ">:")
+ final val HASH = 82; enter(HASH, "#")
+ final val VIEWBOUND = 84; enter(VIEWBOUND, "<%") // TODO: deprecate
+
+ /** XML mode */
+ final val XMLSTART = 96; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate
+
+ // The sets below depend on the contiguous id ranges chosen above
+ // (e.g. alphabetic keywords occupy ids IF..INLINE).
+ final val alphaKeywords = tokenRange(IF, INLINE)
+ final val symbolicKeywords = tokenRange(USCORE, VIEWBOUND)
+ final val symbolicTokens = tokenRange(COMMA, VIEWBOUND)
+ final val keywords = alphaKeywords | symbolicKeywords
+
+ final val allTokens = tokenRange(minToken, maxToken)
+
+ final val simpleLiteralTokens = tokenRange(CHARLIT, STRINGLIT) | BitSet(TRUE, FALSE)
+ final val literalTokens = simpleLiteralTokens | BitSet(INTERPOLATIONID, SYMBOLLIT, NULL)
+
+ final val atomicExprTokens = literalTokens | identifierTokens | BitSet(
+ USCORE, NULL, THIS, SUPER, TRUE, FALSE, RETURN, XMLSTART)
+
+ final val canStartExpressionTokens = atomicExprTokens | BitSet(
+ LBRACE, LPAREN, IF, DO, WHILE, FOR, NEW, TRY, THROW)
+
+ final val canStartTypeTokens = literalTokens | identifierTokens | BitSet(
+ THIS, SUPER, USCORE, LPAREN, AT)
+
+ final val templateIntroTokens = BitSet(CLASS, TRAIT, OBJECT, CASECLASS, CASEOBJECT)
+
+ final val dclIntroTokens = BitSet(DEF, VAL, VAR, TYPE)
+
+ final val defIntroTokens = templateIntroTokens | dclIntroTokens
+
+ final val localModifierTokens = BitSet(
+ ABSTRACT, FINAL, SEALED, IMPLICIT, INLINE, LAZY)
+
+ final val accessModifierTokens = BitSet(
+ PRIVATE, PROTECTED)
+
+ final val modifierTokens = localModifierTokens | accessModifierTokens | BitSet(
+ OVERRIDE)
+
+ /** Is token only legal as start of statement (eof also included)? */
+ final val mustStartStatTokens = defIntroTokens | modifierTokens | BitSet(
+ IMPORT, PACKAGE)
+
+ final val canStartStatTokens = canStartExpressionTokens | mustStartStatTokens | BitSet(
+ AT, CASE)
+
+ final val canEndStatTokens = atomicExprTokens | BitSet(
+ TYPE, RPAREN, RBRACE, RBRACKET)
+
+ final val numericLitTokens = BitSet(INTLIT, LONGLIT, FLOATLIT, DOUBLELIT)
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused b/compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused
new file mode 100644
index 000000000..672c85179
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused
@@ -0,0 +1,535 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core._
+import Flags._, Trees._, TypedTrees._, UntypedTrees._, Names._, StdNames._, NameOps._, Contexts._
+import scala.collection.mutable.ListBuffer
+import util.Positions._, Symbols._, Decorators._, Flags._, Constants._
+import TreeInfo._
+
+/** Methods for building trees, used in the parser. All the trees
+ * returned by this class must be untyped.
+ * Note: currently unused
+ */
+// NOTE(review): this file carries a `.unused` suffix and is not compiled;
+// several calls are stubbed with `???` (convertToTypeName, makeNew template,
+// makeVisitor's canDrop annotation) — confirm before reviving it.
+class TreeBuilder(implicit ctx: Context) {
+
+ import untpd._
+
+ def scalaDot(name: Name): Select =
+ Select(new TypedSplice(tpd.Ident(defn.ScalaPackageVal.termRef)), name)
+
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
+ def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
+ def scalaAnyConstr = scalaDot(tpnme.Any)
+ def scalaUnitConstr = scalaDot(tpnme.Unit)
+ def productConstr = scalaDot(tpnme.Product)
+ def productConstrN(n: Int) = scalaDot(("Product" + n).toTypeName)
+ def serializableConstr = scalaDot(tpnme.Serializable)
+
+ def convertToTypeName(t: Tree): Tree = ???
+
+ // Default position for all trees built below; methods that need a real
+ // position shadow this with a local `implicit val cpos`.
+ private implicit val cpos = NoPosition
+
+ /** Convert all occurrences of (lower-case) variables in a pattern as follows:
+ * x becomes x @ _
+ * x: T becomes x @ (_: T)
+ * Also covert all toplevel lower-case type arguments as follows:
+ * t becomes t @ _
+ */
+ private object patvarTransformer extends TreeTransformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(name) if isVarPattern(tree) && name != nme.WILDCARD =>
+ Bind(
+ name, Ident(nme.WILDCARD).withPos(tree.pos.focus)
+ ).withPos(tree.pos)
+ case Typed(id @ Ident(name), tpt) if isVarPattern(id) && name != nme.WILDCARD =>
+ Bind(
+ name,
+ Typed(
+ Ident(nme.WILDCARD).withPos(tree.pos.focus),
+ transform(tpt)
+ ).withPos(tree.pos.withStart(tree.pos.point))
+ ).withPos(tree.pos.withPoint(id.pos.point))
+ case Apply(fn @ Apply(_, _), args) =>
+ tree.derivedApply(transform(fn), transform(args))
+ case Apply(fn, args) =>
+ tree.derivedApply(fn, transform(args))
+ case Typed(expr, tpt) =>
+ tree.derivedTyped(transform(expr), transform(tpt))
+ case Bind(name, body) =>
+ tree.derivedBind(name, transform(body))
+ case AppliedTypeTree(tycon, args) =>
+ tree.derivedAppliedTypeTree(tycon, args map transform)
+ case Alternative(_) | Typed(_, _) | AndTypeTree(_, _) | Annotated(_, _) =>
+ super.transform(tree)
+ case Parens(_) =>
+ stripParens(tree)
+ case _ =>
+ tree
+ }
+ }
+
+ case class VariableInfo(name: Name, tree: Tree, pos: Position)
+
+ /** Traverse pattern and collect all variable names with their types in buffer
+ * The variables keep their positions; whereas the pattern is converted to be
+ * synthetic for all nodes that contain a variable position.
+ */
+ object getVars extends TreeAccumulator[ListBuffer[VariableInfo]] {
+
+ def namePos(tree: Tree, name: Name): Position =
+ if (name contains '$') tree.pos.focus
+ else {
+ val start = tree.pos.start
+ val end = start + name.decode.length
+ Position(start, end)
+ }
+
+ override def apply(buf: ListBuffer[VariableInfo], tree: Tree): ListBuffer[VariableInfo] = {
+ // First occurrence of a name wins; duplicates are ignored.
+ def seenName(name: Name) = buf exists (_.name == name)
+ def add(name: Name, t: Tree): ListBuffer[VariableInfo] =
+ if (seenName(name)) buf else buf += VariableInfo(name, t, namePos(tree, name))
+
+ tree match {
+ case Bind(nme.WILDCARD, _) =>
+ foldOver(buf, tree)
+ case Bind(name, Typed(tree1, tpt)) if !mayBeTypePat(tpt) =>
+ apply(add(name, tpt), tree1)
+ case Bind(name, tree1) =>
+ apply(add(name, TypeTree()), tree1)
+ case _ =>
+ foldOver(buf, tree)
+ }
+ }
+ }
+
+ /** Returns list of all pattern variables, possibly with their types,
+ * without duplicates
+ */
+ private def getVariables(tree: Tree): List[VariableInfo] =
+ getVars(new ListBuffer[VariableInfo], tree).toList
+
+ def byNameApplication(tpe: Tree): Tree =
+ AppliedTypeTree(scalaDot(tpnme.BYNAME_PARAM_CLASS), List(tpe))
+ def repeatedApplication(tpe: Tree): Tree =
+ AppliedTypeTree(scalaDot(tpnme.REPEATED_PARAM_CLASS), List(tpe))
+
+ // Encodes a tuple as left-nested Pairs; `trees` must be non-empty
+ // (`reduce` throws on an empty list).
+ def makeTuple(trees: List[Tree])(implicit cpos: Position): Tree = {
+ def mkPair(t1: Tree, t2: Tree) = {
+ if (t1.isType) AppliedTypeTree(scalaDot(tpnme.Pair), List(t1, t2))
+ else Pair(t1, t2)
+ }
+ trees reduce mkPair
+ }
+
+ def stripParens(t: Tree) = t match {
+ case Parens(t) => t
+ case _ => t
+ }
+
+ def makeSelfDef(name: TermName, tpt: Tree): ValDef =
+ ValDef(Modifiers(Private), name, tpt, EmptyTree())
+
+ /** If tree is a variable pattern, return its variable info.
+ * Otherwise return none.
+ */
+ private def matchVarPattern(tree: Tree): Option[VariableInfo] = {
+ def wildType(t: Tree): Option[Tree] = t match {
+ case Ident(x) if x.toTermName == nme.WILDCARD => Some(TypeTree())
+ case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt)
+ case _ => None
+ }
+ tree match {
+ case Ident(name) => Some(VariableInfo(name, TypeTree(), tree.pos))
+ case Bind(name, body) => wildType(body) map (x => VariableInfo(name, x, tree.pos))
+ case Typed(id @ Ident(name), tpt) => Some(VariableInfo(name, tpt, id.pos))
+ case _ => None
+ }
+ }
+
+ /** Create tree representing (unencoded) binary operation expression or pattern. */
+ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
+ def mkNamed(args: List[Tree]) =
+ if (isExpr) args map {
+ case arg @ Assign(Ident(name), rhs) => NamedArg(name, rhs).withPos(arg.pos)
+ case arg => arg
+ } else args
+ val arguments = right match {
+ case Parens(arg) => mkNamed(arg :: Nil)
+ case _ => right :: Nil
+ }
+ if (isExpr) {
+ if (isLeftAssoc(op)) {
+ Apply(Select(stripParens(left), op.encode).withPos(opPos), arguments)
+ } else {
+ // Right-associative op: bind the left operand to a temp first so
+ // evaluation order stays left-to-right.
+ val x = ctx.freshName().toTermName
+ Block(
+ List(ValDef(Modifiers(Synthetic), x, TypeTree(), stripParens(left))),
+ Apply(Select(stripParens(right), op.encode).withPos(opPos), List(Ident(x).withPos(left.pos))))
+ }
+ } else {
+ Apply(Ident(op.encode).withPos(opPos), stripParens(left) :: arguments)
+ }
+ }
+
+ /** tpt.<init> */
+ def SelectConstructor(tpt: Tree): Tree =
+ Select(tpt, nme.CONSTRUCTOR)
+
+ private def splitArgss(constr: Tree, outerArgss: List[List[Tree]]): (Tree, List[List[Tree]]) = constr match {
+ case Apply(tree, args) => splitArgss(tree, args :: outerArgss)
+ case _ => (constr, if (outerArgss.isEmpty) ListOfNil else outerArgss)
+ }
+
+ /** new tpt(argss_1)...(argss_n)
+ * @param npos the position spanning <new tpt>, without any arguments
+ */
+ def makeNew(parentConstr: Tree) = {
+ val (tpt, argss) = splitArgss(parentConstr, Nil)
+ New(tpt, argss)
+ }
+
+ /** Create positioned tree representing an object creation <new parents { self => stats }
+ */
+ def makeNew(templ: Template): Tree = {
+ val x = tpnme.ANON_CLASS
+ val nu = makeNew(Ident(x))
+ val clsDef = {
+ implicit val cpos = NoPosition
+ ClassDef(Modifiers(Final), x, Nil, templ)
+ }
+ Block(clsDef, nu)
+ }
+
+ /** Create positioned tree representing an object creation <new parents { self => stats }
+ * @param cpos the position of the new, focus should be the first parent's start.
+ */
+ def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree]): Tree = {
+ val newPos = Position(cpos.start, cpos.point)
+ val clsPos = Position(cpos.point, cpos.end)
+ if (parents.isEmpty)
+ makeNew(List(scalaAnyRefConstr.withPos(newPos.endPos)), self, stats)
+ else if (parents.tail.isEmpty && stats.isEmpty)
+ makeNew(parents.head)
+ else {
+ val x = tpnme.ANON_CLASS
+ val nu = makeNew(Ident(x).withPos(newPos)).withPos(newPos)
+ val clsDef = {
+ implicit val cpos = clsPos
+ ClassDef(Modifiers(Final), x, Nil, Template(???, parents, self, stats))
+ }
+ Block(clsDef, nu)
+ }
+ }
+
+ /** Create a tree representing an assignment <lhs = rhs> */
+ def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
+ case Apply(fn, args) =>
+ // f(args) = rhs desugars to f.update(args :+ rhs)
+ Apply(Select(fn, nme.update), args :+ rhs)
+ case _ =>
+ Assign(lhs, rhs)
+ }
+
+ /** A type tree corresponding to (possibly unary) intersection type
+ def makeIntersectionTypeTree(tps: List[Tree]): Tree =
+ if (tps.tail.isEmpty) tps.head
+ else CompoundTypeTree(Template(tps, emptyValDef, Nil))*/
+
+ private def labelDefAndCall(lname: TermName, rhs: Tree, call: Tree) = {
+ val ldef = DefDef(Modifiers(Label).withPos(cpos.startPos), lname, Nil, ListOfNil, TypeTree(), rhs)
+ Block(ldef, call)
+ }
+
+ private def labelCall(lname: TermName): Apply =
+ Apply(Ident(lname), Nil)
+
+ /** Create tree representing a while loop */
+ def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
+ val continu = labelCall(lname).withPos((cond.pos union body.pos).endPos)
+ val rhs = {
+ implicit val cpos = NoPosition
+ If(cond, Block(body, continu), Literal(Constant()).withPos(continu.pos))
+ }
+ labelDefAndCall(lname, rhs, continu)
+ }
+
+ /** Create tree representing a do-while loop */
+ def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
+ val continu = labelCall(lname).withPos((cond.pos union body.pos).endPos)
+ val rhs = Block(body, If(cond, continu, Literal(Constant()).withPos(continu.pos)))
+ labelDefAndCall(lname, rhs, continu)
+ }
+
+ /** Create block of statements `stats` */
+ def makeBlock(stats: List[Tree]): Tree =
+ if (stats.isEmpty) Literal(Constant())
+ else if (!stats.last.isTerm) Block(stats, Literal(Constant()).withPos(cpos.endPos))
+ else if (stats.length == 1) stats.head
+ else Block(stats.init, stats.last)
+
+ // Wraps `tree` in `tree.withFilter { case condition => true; case _ => false }`.
+ def makePatFilter(tree: Tree, condition: Tree, canDrop: Boolean): Tree = {
+ val cases = List(
+ CaseDef(condition, EmptyTree(), Literal(Constant(true))),
+ CaseDef(Ident(nme.WILDCARD), EmptyTree(), Literal(Constant(false)))
+ )
+ val matchTree = makeVisitor(cases, checkExhaustive = false, canDrop)
+ locally {
+ implicit val cpos = tree.pos
+ Apply(Select(tree, nme.withFilter), matchTree :: Nil)
+ }
+ }
+
+ /** Create tree for for-comprehension generator <pat <- rhs> or <pat = rhs> */
+ def makeGenerator(pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
+ val pat1 = patvarTransformer.transform(pat)
+ if (valeq) ValEq(pat1, rhs)
+ else ValFrom(pat1, makePatFilter(rhs, pat1, canDrop = true))
+ }
+
+/*
+ def makeSyntheticTypeParam(pname: TypeName, bounds: Tree) =
+ TypeDef(Modifiers(DEFERRED | SYNTHETIC), pname, Nil, bounds)
+*/
+ abstract class Enumerator { def pos: Position }
+ case class ValFrom(pat: Tree, rhs: Tree) extends Enumerator {
+ val pos = cpos union pat.pos union rhs.pos
+ }
+ case class ValEq(pat: Tree, rhs: Tree) extends Enumerator {
+ val pos = cpos union pat.pos union rhs.pos
+ }
+ case class Filter(test: Tree) extends Enumerator {
+ val pos = cpos union test.pos
+ }
+
+ /** Create tree for for-comprehension <for (enums) do body> or
+ * <for (enums) yield body> where mapName and flatMapName are chosen
+ * corresponding to whether this is a for-do or a for-yield.
+ * The creation performs the following rewrite rules:
+ *
+ * 1.
+ *
+ * for (P <- G) E ==> G.foreach (P => E)
+ *
+ * Here and in the following (P => E) is interpreted as the function (P => E)
+ * if P is a variable pattern and as the partial function { case P => E } otherwise.
+ *
+ * 2.
+ *
+ * for (P <- G) yield E ==> G.map (P => E)
+ *
+ * 3.
+ *
+ * for (P_1 <- G_1; P_2 <- G_2; ...) ...
+ * ==>
+ * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
+ *
+ * 4.
+ *
+ * for (P <- G; E; ...) ...
+ * =>
+ * for (P <- G.filter (P => E); ...) ...
+ *
+ * 5. For any N:
+ *
+ * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
+ * ==>
+ * for (TupleN(P_1, P_2, ... P_N) <-
+ * for (x_1 @ P_1 <- G) yield {
+ * val x_2 @ P_2 = E_2
+ * ...
+ * val x_N & P_N = E_N
+ * TupleN(x_1, ..., x_N)
+ * } ...)
+ *
+ * If any of the P_i are variable patterns, the corresponding `x_i @ P_i' is not generated
+ * and the variable constituting P_i is used instead of x_i
+ *
+ * @param mapName The name to be used for maps (either map or foreach)
+ * @param flatMapName The name to be used for flatMaps (either flatMap or foreach)
+ * @param enums The enumerators in the for expression
+ * @param body The body of the for expression
+ */
+ private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = {
+
+ /** make a closure pat => body.
+ * The closure is assigned a transparent position with the point at pos.point and
+ * the limits given by pat and body.
+ */
+ def makeClosure(pat: Tree, body: Tree): Tree =
+ matchVarPattern(pat) match {
+ case Some(VariableInfo(name, tpt, pos)) =>
+ Function(ValDef(Modifiers(Param).withPos(cpos.startPos), name.toTermName, tpt, EmptyTree()).withPos(pos) :: Nil, body)
+ case None =>
+ makeVisitor(List(CaseDef(pat, EmptyTree(), body)), checkExhaustive = false)
+ }
+
+ /** Make an application qual.meth(pat => body) positioned at `pos`.
+ */
+ def makeCombination(meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
+ Apply(Select(qual, meth).withPos(NoPosition), makeClosure(pat, body))
+
+ /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
+ */
+ def patternVar(pat: Tree): Option[Name] = pat match {
+ case Bind(name, _) => Some(name)
+ case _ => None
+ }
+
+ /** If `pat` is not yet a `Bind` wrap it in one with a fresh name
+ */
+ def makeBind(pat: Tree): Tree = pat match {
+ case Bind(_, _) => pat
+ case _ => Bind(ctx.freshName().toTermName, pat)
+ }
+
+ /** A reference to the name bound in Bind `pat`.
+ */
+ def makeValue(pat: Tree): Tree = pat match {
+ case Bind(name, _) => Ident(name).withPos(pat.pos.focus)
+ }
+
+ enums match {
+ case (enum @ ValFrom(pat, rhs)) :: Nil =>
+ makeCombination(mapName, rhs, pat, body).withPos(enum.pos)
+ case ValFrom(pat, rhs) :: (rest @ (ValFrom( _, _) :: _)) =>
+ makeCombination(flatMapName, rhs, pat,
+ makeFor(mapName, flatMapName, rest, body))
+ case (enum @ ValFrom(pat, rhs)) :: Filter(test) :: rest =>
+ makeFor(mapName, flatMapName,
+ ValFrom(pat, makeCombination(nme.withFilter, rhs, pat, test)) :: rest,
+ body)
+ case (enum @ ValFrom(pat, rhs)) :: rest =>
+ // Rule 5: a generator followed by one or more `pat = expr` definitions.
+ val (valeqs, rest1) = rest.span(_.isInstanceOf[ValEq])
+ assert(!valeqs.isEmpty)
+ val pats = valeqs map { case ValEq(pat, _) => pat }
+ val rhss = valeqs map { case ValEq(_, rhs) => rhs }
+ val defpat1 = makeBind(pat)
+ val defpats = pats map makeBind
+ val pdefs = (defpats, rhss).zipped flatMap (makePatDef)
+ val ids = (defpat1 :: defpats) map makeValue
+ val rhs1 = makeForYield(ValFrom(defpat1, rhs) :: Nil, Block(pdefs, makeTuple(ids)))
+ val allpats = pat :: pats
+ val vfrom1 = ValFrom(makeTuple(allpats), rhs1)
+ makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
+ case _ =>
+ EmptyTree() //may happen for erroneous input
+ }
+ }
+
+ /** Create tree for for-do comprehension <for (enums) body> */
+ def makeFor(enums: List[Enumerator], body: Tree): Tree =
+ makeFor(nme.foreach, nme.foreach, enums, body)
+
+ /** Create tree for for-yield comprehension <for (enums) yield body> */
+ def makeForYield(enums: List[Enumerator], body: Tree): Tree =
+ makeFor(nme.map, nme.flatMap, enums, body)
+
+ /** Create tree for a pattern alternative */
+ def makeAlternative(ts: List[Tree]): Tree = Alternative(ts flatMap alternatives)
+
+ def alternatives(t: Tree): List[Tree] = t match {
+ case Alternative(ts) => ts
+ case _ => List(t)
+ }
+
+ def mkAnnotated(cls: Symbol, tree: Tree) =
+ Annotated(TypedSplice(tpd.New(cls.typeRef)), tree)
+
+ /** Create visitor <x => x match cases> */
+ def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, canDrop: Boolean = false): Tree = {
+ val x = ctx.freshName().toTermName
+ val id = Ident(x)
+ val sel =
+ if (canDrop) mkAnnotated(???, id)
+ else if (!checkExhaustive) mkAnnotated(defn.UncheckedAnnot, id)
+ else id
+ Function(List(ugen.syntheticParameter(x)), Match(sel, cases))
+ }
+
+ /** Create tree for case definition <case pat if guard => rhs> */
+ def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef =
+ CaseDef(patvarTransformer.transform(pat), guard, rhs)
+
+ /** Create tree for pattern definition <val pat0 = rhs> */
+ def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
+ makePatDef(Modifiers(), pat, rhs)
+
+ /** Create tree for pattern definition <mods val pat0 = rhs> */
+ def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree, varsArePatterns: Boolean = false): List[Tree] = matchVarPattern(pat) match {
+ case Some(VariableInfo(name, tpt, pos)) if varsArePatterns =>
+ ValDef(mods, name.toTermName, tpt, rhs).withPos(pos) :: Nil // point comes from pat.pos
+
+ case _ =>
+ // in case there is exactly one variable x_1 in pattern
+ // val/var p = e ==> val/var x_1 = e.match (case p => (x_1))
+ //
+ // in case there are zero or more than one variables in pattern
+ // val/var p = e ==> private synthetic val t$ = e.match (case p => (x_1, ..., x_N))
+ // val/var x_1 = t$._1
+ // ...
+ // val/var x_N = t$._N
+
+ val rhsUnchecked = mkAnnotated(defn.UncheckedAnnot, rhs)
+
+ // TODO: clean this up -- there is too much information packed into makePatDef's `pat` argument
+ // when it's a simple identifier (case Some((name, tpt)) -- above),
+ // pat should have the type ascription that was specified by the user
+ // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
+ // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
+ // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
+ val (pat1, rhs1) = patvarTransformer.transform(pat) match {
+ // move the Typed ascription to the rhs
+ case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
+ val rhsTypedUnchecked =
+ if (tpt.isEmpty) rhsUnchecked else Typed(rhsUnchecked, tpt)
+ (expr, rhsTypedUnchecked)
+ case ok =>
+ (ok, rhsUnchecked)
+ }
+ val vars = getVariables(pat1)
+ val ids = vars map (v => Ident(v.name).withPos(v.pos))
+ val caseDef = CaseDef(pat1, EmptyTree(), makeTuple(ids))
+ val matchExpr = Match(rhs1, caseDef :: Nil)
+ vars match {
+ case List(VariableInfo(vname, tpt, pos)) =>
+ ValDef(mods, vname.toTermName, tpt, matchExpr) :: Nil
+ case _ =>
+ // NOTE(review): tuple accessors below use `_$n` with n starting at 0;
+ // Scala tuple members are 1-based (_1.._N) — confirm intent if revived.
+ val tmpName = ctx.freshName().toTermName
+ val patMods = Modifiers(PrivateLocal | Synthetic | (mods.flags & Lazy))
+ val firstDef = ValDef(patMods, tmpName, TypeTree(), matchExpr)
+ val restDefs = for {
+ (VariableInfo(vname, tpt, pos), n) <- vars.zipWithIndex
+ } yield {
+ val rhs = {
+ implicit val cpos = pos.focus
+ Select(Ident(tmpName), ("_" + n).toTermName)
+ }
+ ValDef(mods, vname.toTermName, tpt, rhs).withPos(pos)
+ }
+ firstDef :: restDefs
+ }
+ }
+
+ /** Create a tree representing the function type (argtpes) => restpe */
+ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
+ AppliedTypeTree(scalaDot(("Function" + argtpes.length).toTypeName), argtpes ::: List(restpe))
+
+ /** Append implicit parameter section if `contextBounds` nonempty */
+ def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
+ if (contextBounds.isEmpty) vparamss
+ else {
+ val mods = Modifiers(if (owner.isTypeName) PrivateLocal | ParamAccessor else Param)
+ val evidenceParams = for (tpt <- contextBounds) yield {
+ val pname = ctx.freshName(nme.EVIDENCE_PARAM_PREFIX).toTermName
+ ValDef(mods | Implicit | Synthetic, pname, tpt, EmptyTree())
+ }
+ vparamss.reverse match {
+ case (vparams @ (vparam :: _)) :: _ if vparam.mods is Implicit =>
+ // Merge into an existing trailing implicit section.
+ vparamss.init :+ (evidenceParams ++ vparams)
+ case _ =>
+ vparamss :+ evidenceParams
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Utility.scala b/compiler/src/dotty/tools/dotc/parsing/Utility.scala
new file mode 100644
index 000000000..f522492f8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Utility.scala
@@ -0,0 +1,170 @@
+package dotty.tools.dotc.parsing
+
+import scala.collection.mutable
+
+
+/**
+ * The `Utility` object provides utility functions for processing instances
+ * of bound and not bound XML classes, as well as escaping text nodes.
+ *
+ * @author Burak Emir
+ */
+object Utility {
+ import scala.reflect.internal.Chars.SU
+
+ // Predefined XML entity references and the characters they denote.
+ private val unescMap = Map(
+ "lt" -> '<',
+ "gt" -> '>',
+ "amp" -> '&',
+ "quot" -> '"',
+ "apos" -> '\''
+ )
+
+ /**
+ * Appends unescaped string to `s`, `amp` becomes `&amp;`,
+ * `lt` becomes `&lt;` etc..
+ *
+ * @return `'''null'''` if `ref` was not a predefined entity.
+ */
+ private final def unescape(ref: String, s: StringBuilder): StringBuilder =
+ ((unescMap get ref) map (s append _)).orNull
+
+ /** Splits an attribute value into text and entity-reference segments.
+ *  Predefined entities and character references (`&#...;`) are unescaped
+ *  into the surrounding text; unknown `&name;` references become
+ *  `entityRef(name)` nodes. Malformed character references raise a
+ *  RuntimeException via the error callbacks passed to `parseCharRef`.
+ */
+ def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = {
+ val sb = new StringBuilder
+ var rfb: StringBuilder = null
+ val nb = new mutable.ListBuffer[T]()
+
+ val it = value.iterator
+ while (it.hasNext) {
+ var c = it.next()
+ // entity! flush buffer into text node
+ if (c == '&') {
+ c = it.next()
+ if (c == '#') {
+ c = it.next()
+ val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+ sb.append(theChar)
+ }
+ else {
+ // Named reference: accumulate up to (but not including) ';'.
+ if (rfb eq null) rfb = new StringBuilder()
+ rfb append c
+ c = it.next()
+ while (c != ';') {
+ rfb.append(c)
+ c = it.next()
+ }
+ val ref = rfb.toString()
+ rfb.clear()
+ unescape(ref,sb) match {
+ case null =>
+ if (!sb.isEmpty) { // flush buffer
+ nb += text(sb.toString())
+ sb.clear()
+ }
+ nb += entityRef(ref) // add entityref
+ case _ =>
+ }
+ }
+ }
+ else sb append c
+ }
+
+ if (!sb.isEmpty) // flush buffer
+ nb += text(sb.toString())
+
+ nb.toList
+ }
+
+ /**
+ * {{{
+ * CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
+ * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ * }}}
+ * See [66]
+ */
+ def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
+ val hex = ch() == 'x'
+ if (hex) nextch()
+ val base = if (hex) 16 else 10
+ var i = 0
+ while (ch() != ';') {
+ ch() match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ i = i * base + ch().asDigit
+ case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
+ | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
+ if (! hex)
+ reportSyntaxError("hex char not allowed in decimal char ref\n" +
+ "Did you mean to write &#x ?")
+ else
+ i = i * base + ch().asDigit
+ case SU =>
+ reportTruncatedError("")
+ case _ =>
+ reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
+ }
+ nextch()
+ }
+ // `i` is a code point; this builds its (possibly surrogate-pair) String.
+ new String(Array(i), 0, 1)
+ }
+
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)
+ * }}} */
+ final def isSpace(ch: Char): Boolean = ch match {
+ case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
+ case _ => false
+ }
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)+
+ * }}} */
+ final def isSpace(cs: Seq[Char]): Boolean = cs.nonEmpty && (cs forall isSpace)
+
+ /** {{{
+ * NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
+ * | CombiningChar | Extender
+ * }}}
+ * See [4] and Appendix B of XML 1.0 specification.
+ */
+ def isNameChar(ch: Char) = {
+ import java.lang.Character._
+ // The constants represent groups Mc, Me, Mn, Lm, and Nd.
+
+ isNameStart(ch) || (getType(ch).toByte match {
+ case COMBINING_SPACING_MARK |
+ ENCLOSING_MARK | NON_SPACING_MARK |
+ MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
+ case _ => ".-:" contains ch
+ })
+ }
+
+ /** {{{
+ * NameStart ::= ( Letter | '_' )
+ * }}}
+ * where Letter means in one of the Unicode general
+ * categories `{ Ll, Lu, Lo, Lt, Nl }`.
+ *
+ * We do not allow a name to start with `:`.
+ * See [3] and Appendix B of XML 1.0 specification
+ */
+ def isNameStart(ch: Char) = {
+ import java.lang.Character._
+
+ getType(ch).toByte match {
+ case LOWERCASE_LETTER |
+ UPPERCASE_LETTER | OTHER_LETTER |
+ TITLECASE_LETTER | LETTER_NUMBER => true
+ case _ => ch == '_'
+ }
+ }
+
+ /** {{{
+ * Name ::= ( Letter | '_' ) (NameChar)*
+ * }}}
+ * See [5] of XML 1.0 specification.
+ */
+ def isName(s: String) =
+ s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar)
+
+}
+
diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala
new file mode 100644
index 000000000..8b113ed96
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/package.scala
@@ -0,0 +1,33 @@
+package dotty.tools.dotc
+
+import util.Chars._
+import core.Names.Name
+import core.StdNames.nme
+import core.NameOps._
+
+package object parsing {
+
+  /** The infix precedence of `operator`, determined by its first character:
+   *  the error name yields -1, operator-assignments (e.g. `+=`) yield 0,
+   *  alphanumeric operators bind loosest of the real operators (1), and
+   *  symbolic operators rank from 2 (`|`) up to 10 (any other symbolic char).
+   */
+  def precedence(operator: Name): Int =
+    if (operator eq nme.ERROR) -1
+    else {
+      val firstCh = operator(0)
+      if (isScalaLetter(firstCh)) 1
+      else if (operator.isOpAssignmentName) 0
+      else firstCh match {
+        case '|' => 2
+        case '^' => 3
+        case '&' => 4
+        case '=' | '!' => 5
+        case '<' | '>' => 6
+        case ':' => 7
+        case '+' | '-' => 8
+        case '*' | '/' | '%' => 9
+        case _ => 10
+      }
+    }
+
+  /** Lowest precedence, used for non-operator positions */
+  def minPrec = 0
+  /** Lowest precedence an infix operator can have */
+  def minInfixPrec = 1
+  /** One more than the highest operator precedence */
+  def maxPrec = 11
+
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
new file mode 100644
index 000000000..b321d3736
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
@@ -0,0 +1,258 @@
+package dotty.tools.dotc
+package printing
+
+import core._
+import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Contexts._
+import collection.mutable
+import collection.Map
+import Decorators._
+import scala.annotation.switch
+import scala.util.control.NonFatal
+import reporting.diagnostic.MessageContainer
+import util.DiffUtil
+import Highlighting._
+import SyntaxHighlighting._
+
+object Formatting {
+
+  /** General purpose string formatter, with the following features:
+   *
+   *  1) On all Showables, `show` is called instead of `toString`
+   *  2) Exceptions raised by a `show` are handled by falling back to `toString`.
+   *  3) Sequences can be formatted using the desired separator between two `%` signs,
+   *     eg `i"myList = (${myList}%, %)"`
+   *  4) Safe handling of multi-line margins. Left margins are skipped on the parts
+   *     of the string context *before* inserting the arguments. That way, we guard
+   *     against accidentally treating an interpolated value as a margin.
+   */
+  class StringFormatter(protected val sc: StringContext) {
+
+    /** Render one interpolated argument: `Showable`s are `show`n, with the
+     *  argument's raw `toString` reported if `show` throws; everything else
+     *  uses plain `toString`.
+     */
+    protected def showArg(arg: Any)(implicit ctx: Context): String = arg match {
+      case arg: Showable =>
+        try arg.show
+        catch {
+          // fix: was `$toString`, which interpolated the formatter's own
+          // toString instead of the argument being displayed
+          case NonFatal(ex) => s"[cannot display due to $ex, raw string = ${arg.toString}]"
+        }
+      case _ => arg.toString
+    }
+
+    /** Interpret a `%sep%` directive immediately following a sequence argument:
+     *  the sequence is rendered with `sep` (escapes processed) between elements
+     *  and the directive is consumed from `suffix`; otherwise fall back to
+     *  `showArg` and leave the suffix untouched.
+     */
+    private def treatArg(arg: Any, suffix: String)(implicit ctx: Context): (Any, String) = arg match {
+      case arg: Seq[_] if suffix.nonEmpty && suffix.head == '%' =>
+        val (rawsep, rest) = suffix.tail.span(_ != '%')
+        val sep = StringContext.treatEscapes(rawsep)
+        if (rest.nonEmpty) (arg.map(showArg).mkString(sep), rest.tail)
+        else (arg, suffix)
+      case _ =>
+        (showArg(arg), suffix)
+    }
+
+    /** Build the final string: strip margins from the literal parts only (so
+     *  interpolated values are never mistaken for margins), then re-assemble
+     *  through a fresh `StringContext`.
+     */
+    def assemble(args: Seq[Any])(implicit ctx: Context): String = {
+      def isLineBreak(c: Char) = c == '\n' || c == '\f' // compatible with StringLike#isLineBreak
+      def stripTrailingPart(s: String) = {
+        val (pre, post) = s.span(c => !isLineBreak(c))
+        pre ++ post.stripMargin
+      }
+      val (prefix, suffixes) = sc.parts.toList match {
+        case head :: tail => (head.stripMargin, tail map stripTrailingPart)
+        case Nil => ("", Nil)
+      }
+      val (args1, suffixes1) = (args, suffixes).zipped.map(treatArg(_, _)).unzip
+      new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*)
+    }
+  }
+
+  /** The `em` string interpolator works like the `i` string interpolator, but marks nonsensical errors
+   *  using `<nonsensical>...</nonsensical>` tags.
+   *  Note: Instead of these tags, it would be nicer to return a data structure containing the message string
+   *  and a boolean indicating whether the message is sensical, but then we cannot use string operations
+   *  like concatenation, stripMargin etc on the values returned by em"...", and in the current error
+   *  message composition methods, this is crucial.
+   */
+  class ErrorMessageFormatter(sc: StringContext) extends StringFormatter(sc) {
+    // every argument is additionally checked for sensicality and tagged if erroneous
+    override protected def showArg(arg: Any)(implicit ctx: Context): String =
+      wrapNonSensical(arg, super.showArg(arg))
+  }
+
+  /** A `StringFormatter` that syntax-highlights `Showable` and `String`
+   *  arguments (unless `-color:never` is set) and renders `Highlight` and
+   *  `HighlightBuffer` arguments through their own display logic.
+   */
+  class SyntaxFormatter(sc: StringContext) extends StringFormatter(sc) {
+    override protected def showArg(arg: Any)(implicit ctx: Context): String =
+      arg match {
+        case arg: Showable if ctx.settings.color.value != "never" =>
+          val highlighted =
+            SyntaxHighlighting(wrapNonSensical(arg, super.showArg(arg)))
+          new String(highlighted.toArray)
+        case hl: Highlight =>
+          hl.show
+        case hb: HighlightBuffer =>
+          hb.toString
+        case str: String if ctx.settings.color.value != "never" =>
+          new String(SyntaxHighlighting(str).toArray)
+        case _ => super.showArg(arg)
+      }
+  }
+
+  /** Wrap `str` in `<nonsensical>` tags when `arg` (a `Type` or `Symbol`) is
+   *  erroneous, so the resulting message can later be recognized and filtered.
+   */
+  private def wrapNonSensical(arg: Any /* Type | Symbol */, str: String)(implicit ctx: Context): String = {
+    import MessageContainer._
+    // a type is sensical if it exists and is not erroneous; a completed symbol
+    // if its info exists and is no error type; anything else trivially is
+    def isSensical(arg: Any): Boolean = arg match {
+      case tpe: Type =>
+        tpe.exists && !tpe.isErroneous
+      case sym: Symbol if sym.isCompleted =>
+        sym.info != ErrorType && sym.info != TypeAlias(ErrorType) && sym.info.exists
+      case _ => true
+    }
+
+    if (isSensical(arg)) str
+    else nonSensicalStartTag + str + nonSensicalEndTag
+  }
+
+  private type Recorded = AnyRef /*Symbol | PolyParam*/
+
+  /** Maps each printed name to the distinct symbols/poly params shown under
+   *  that name. Homonyms after the first are disambiguated by appending one
+   *  prime (`'`) per previously seen entry.
+   */
+  private class Seen extends mutable.HashMap[String, List[Recorded]] {
+
+    override def default(key: String) = Nil
+
+    /** Record that `entry` was printed as `str` and return the (possibly
+     *  primed) name to display. Aliases are followed so an alias and its
+     *  underlying class do not count as two different entries.
+     */
+    def record(str: String, entry: Recorded)(implicit ctx: Context): String = {
+      def followAlias(e1: Recorded): Recorded = e1 match {
+        case e1: Symbol if e1.isAliasType =>
+          val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol
+          if (underlying.name == e1.name) underlying else e1
+        case _ => e1
+      }
+      lazy val dealiased = followAlias(entry)
+      // position of this entry among the homonyms already recorded for `str`
+      var alts = apply(str).dropWhile(alt => dealiased ne followAlias(alt))
+      if (alts.isEmpty) {
+        alts = entry :: apply(str)
+        update(str, alts)
+      }
+      str + "'" * (alts.length - 1)
+    }
+  }
+
+  /** A `RefinedPrinter` that records every printed symbol and poly param in
+   *  `seen`, priming homonyms so they remain distinguishable in explanations.
+   */
+  private class ExplainingPrinter(seen: Seen)(_ctx: Context) extends RefinedPrinter(_ctx) {
+    override def simpleNameString(sym: Symbol): String =
+      if ((sym is ModuleClass) && sym.sourceModule.exists) simpleNameString(sym.sourceModule)
+      else seen.record(super.simpleNameString(sym), sym)
+
+    override def polyParamNameString(param: PolyParam): String =
+      seen.record(super.polyParamNameString(param), param)
+  }
+
+  /** Create explanation for single `Recorded` type or symbol: for a
+   *  `PolyParam` its current constraint, for a `Symbol` its kind, location
+   *  and bounds. NOTE(review): the match is non-exhaustive over `AnyRef`;
+   *  any other entry kind would throw a MatchError.
+   */
+  def explanation(entry: AnyRef)(implicit ctx: Context): String = {
+    // render a bound only when it differs from the trivial Nothing/Any default
+    def boundStr(bound: Type, default: ClassSymbol, cmp: String) =
+      if (bound.isRef(default)) "" else i"$cmp $bound"
+
+    def boundsStr(bounds: TypeBounds): String = {
+      val lo = boundStr(bounds.lo, defn.NothingClass, ">:")
+      val hi = boundStr(bounds.hi, defn.AnyClass, "<:")
+      if (lo.isEmpty) hi
+      else if (hi.isEmpty) lo
+      else s"$lo and $hi"
+    }
+
+    // describe non-trivial bounds; an alias (lo eq hi) is reported as such
+    def addendum(cat: String, info: Type): String = info match {
+      case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty =>
+        if (lo eq hi) i" which is an alias of $lo"
+        else i" with $cat ${boundsStr(bounds)}"
+      case _ =>
+        ""
+    }
+
+    entry match {
+      case param: PolyParam =>
+        s"is a type variable${addendum("constraint", ctx.typeComparer.bounds(param))}"
+      case sym: Symbol =>
+        s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", sym.info)}"
+    }
+  }
+
+  /** Turns a `Seen` into a `String` to produce an explanation for types on the
+   *  form `where: T is...`
+   *
+   *  @return string disambiguating types
+   */
+  private def explanations(seen: Seen)(implicit ctx: Context): String = {
+    // a single, unprimed entry is only worth explaining when it is a type
+    // variable present in the current constraint; primed homonyms always are
+    def needsExplanation(entry: Recorded) = entry match {
+      case param: PolyParam => ctx.typerState.constraint.contains(param)
+      case _ => false
+    }
+
+    val toExplain: List[(String, Recorded)] = seen.toList.flatMap {
+      case (str, entry :: Nil) =>
+        if (needsExplanation(entry)) (str, entry) :: Nil else Nil
+      case (str, entries) =>
+        entries.map(alt => (seen.record(str, alt), alt))
+    }.sortBy(_._1)
+
+    // align all explanations in a column after the longest name
+    def columnar(parts: List[(String, String)]): List[String] = {
+      lazy val maxLen = parts.map(_._1.length).max
+      parts.map {
+        case (leader, trailer) =>
+          val variable = hl"$leader"
+          s"""$variable${" " * (maxLen - leader.length)} $trailer"""
+      }
+    }
+
+    val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) }
+    val explainLines = columnar(explainParts)
+    if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n"
+  }
+
+  /** Context with correct printer set for explanations: re-uses an existing
+   *  `ExplainingPrinter` (so recordings accumulate into the same `Seen`),
+   *  otherwise installs a fresh one.
+   */
+  private def explainCtx(seen: Seen)(implicit ctx: Context): Context = ctx.printer match {
+    case dp: ExplainingPrinter =>
+      ctx // re-use outer printer and defer explanation to it
+    case _ => ctx.fresh.setPrinterFn(ctx => new ExplainingPrinter(seen)(ctx))
+  }
+
+  /** Entrypoint for explanation string interpolator:
+   *
+   *  ```
+   *  ex"disambiguate $tpe1 and $tpe2"
+   *  ```
+   *
+   *  Runs `op` under a recording context, then appends the `where:` section
+   *  explaining every recorded name that needed disambiguation.
+   */
+  def explained2(op: Context => String)(implicit ctx: Context): String = {
+    val seen = new Seen
+    op(explainCtx(seen)) ++ explanations(seen)
+  }
+
+  /** When getting a type mismatch it is useful to disambiguate placeholders like:
+   *
+   *  ```
+   *  found: List[Int]
+   *  required: List[T]
+   *  where: T is a type in the initalizer of value s which is an alias of
+   *  String
+   *  ```
+   *
+   *  @return the `where` section as well as the printing context for the
+   *          placeholders - `("T is a...", printCtx)`
+   */
+  def disambiguateTypes(args: Type*)(implicit ctx: Context): (String, Context) = {
+    val seen = new Seen
+    val printCtx = explainCtx(seen)
+    args.foreach(_.show(printCtx)) // showing each member will put it into `seen`
+    (explanations(seen), printCtx)
+  }
+
+  /** This method will produce a colored type diff from the given arguments.
+   *  The idea is to do this for known cases that are useful and then fall back
+   *  on regular syntax highlighting for the cases which are unhandled.
+   *
+   *  Please note that if used in combination with `disambiguateTypes` the
+   *  correct `Context` for printing should also be passed when calling the
+   *  method.
+   *
+   *  @return the (found, expected) pair with coloring to highlight the
+   *          difference
+   */
+  def typeDiff(found: Type, expected: Type)(implicit ctx: Context): (String, String) = {
+    val fnd = wrapNonSensical(found, found.show)
+    val exp = wrapNonSensical(expected, expected.show)
+
+    DiffUtil.mkColoredTypeDiff(fnd, exp) match {
+      case _ if ctx.settings.color.value == "never" => (fnd, exp)
+      // the (fnd, exp) bound here shadow the plain strings above: the colored
+      // versions are returned only when less than half the text changed
+      case (fnd, exp, change) if change < 0.5 => (fnd, exp)
+      case _ => (fnd, exp) // diff too large to be helpful: plain strings
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala
new file mode 100644
index 000000000..3bda7fb7a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala
@@ -0,0 +1,77 @@
+package dotty.tools
+package dotc
+package printing
+
+import scala.collection.mutable
+import core.Contexts.Context
+
+object Highlighting {
+
+  /** Allow a `Highlight` wherever a `String` is expected, rendering it. */
+  implicit def highlightShow(h: Highlight)(implicit ctx: Context): String =
+    h.show
+
+  /** A piece of text paired with the ANSI escape sequence that colors it.
+   *  `show` respects the `-color` setting; with `-color:never` only the
+   *  plain text is produced.
+   */
+  abstract class Highlight(private val highlight: String) {
+    def text: String
+
+    def show(implicit ctx: Context) =
+      if (ctx.settings.color.value == "never") text
+      else highlight + text + Console.RESET
+
+    // NOTE(review): unlike `show`, `toString` always colors — it has no
+    // Context to consult the color setting
+    override def toString =
+      highlight + text + Console.RESET
+
+    def +(other: Highlight)(implicit ctx: Context): HighlightBuffer =
+      new HighlightBuffer(this) + other
+
+    def +(other: String)(implicit ctx: Context): HighlightBuffer =
+      new HighlightBuffer(this) + other
+  }
+
+  /** A non-color modifier such as bold or underline.
+   *  NOTE(review): with `-color:never` a Modifier shows as "" — the text is
+   *  dropped entirely, unlike plain Highlights — confirm this is intended.
+   */
+  abstract class Modifier(private val mod: String, text: String) extends Highlight(Console.RESET) {
+    override def show(implicit ctx: Context) =
+      if (ctx.settings.color.value == "never") ""
+      else mod + super.show
+  }
+
+  /** Accumulates successive highlighted and plain fragments for rendering
+   *  as one concatenated string.
+   */
+  case class HighlightBuffer(hl: Highlight)(implicit ctx: Context) {
+    val buffer = new mutable.ListBuffer[String]
+
+    buffer += hl.show
+
+    def +(other: Highlight): HighlightBuffer = {
+      buffer += other.show
+      this
+    }
+
+    def +(other: String): HighlightBuffer = {
+      buffer += other
+      this
+    }
+
+    override def toString =
+      buffer.mkString
+  }
+
+  case class NoColor(text: String) extends Highlight(Console.RESET)
+
+  // foreground colors
+  case class Red(text: String) extends Highlight(Console.RED)
+  case class Blue(text: String) extends Highlight(Console.BLUE)
+  case class Cyan(text: String) extends Highlight(Console.CYAN)
+  case class Black(text: String) extends Highlight(Console.BLACK)
+  case class Green(text: String) extends Highlight(Console.GREEN)
+  case class White(text: String) extends Highlight(Console.WHITE)
+  case class Yellow(text: String) extends Highlight(Console.YELLOW)
+  case class Magenta(text: String) extends Highlight(Console.MAGENTA)
+
+  // background colors
+  case class RedB(text: String) extends Highlight(Console.RED_B)
+  case class BlueB(text: String) extends Highlight(Console.BLUE_B)
+  case class CyanB(text: String) extends Highlight(Console.CYAN_B)
+  case class BlackB(text: String) extends Highlight(Console.BLACK_B)
+  case class GreenB(text: String) extends Highlight(Console.GREEN_B)
+  case class WhiteB(text: String) extends Highlight(Console.WHITE_B)
+  case class YellowB(text: String) extends Highlight(Console.YELLOW_B)
+  case class MagentaB(text: String) extends Highlight(Console.MAGENTA_B)
+
+  // text attributes
+  case class Bold(text: String) extends Modifier(Console.BOLD, text)
+  case class Underlined(text: String) extends Modifier(Console.UNDERLINED, text)
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
new file mode 100644
index 000000000..15c382bb0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -0,0 +1,500 @@
+package dotty.tools.dotc
+package printing
+
+import core._
+import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, Denotations._
+import Contexts.Context, Scopes.Scope, Denotations.Denotation, Annotations.Annotation
+import StdNames.{nme, tpnme}
+import ast.Trees._, ast._
+import config.Config
+import java.lang.Integer.toOctalString
+import config.Config.summarizeDepth
+import scala.annotation.switch
+
+class PlainPrinter(_ctx: Context) extends Printer {
+ protected[this] implicit def ctx: Context = _ctx.addMode(Mode.Printing)
+
+ private var openRecs: List[RecType] = Nil
+
+  /** Hard cap on nested `toText` calls, guarding against cyclic types. */
+  protected def maxToTextRecursions = 100
+
+  /** Evaluate `op` while tracking the recursion depth; once either the hard
+   *  recursion cap or the current summarization limit is reached, render
+   *  "..." instead, reporting only when the hard cap was actually hit.
+   */
+  protected final def controlled(op: => Text): Text =
+    if (ctx.toTextRecursions < maxToTextRecursions && ctx.toTextRecursions < maxSummarized)
+      try {
+        ctx.toTextRecursions += 1
+        op
+      } finally {
+        ctx.toTextRecursions -= 1
+      }
+    else {
+      if (ctx.toTextRecursions >= maxToTextRecursions)
+        recursionLimitExceeded()
+      "..."
+    }
+
+  /** Warn (and, in debug mode, dump the stack) when the recursion cap is hit. */
+  protected def recursionLimitExceeded() = {
+    ctx.warning("Exceeded recursion depth attempting to print.")
+    if (ctx.debug) Thread.dumpStack()
+  }
+
+  /** If true, tweak output so it is the same before and after pickling */
+  protected def homogenizedView: Boolean = ctx.settings.YtestPickler.value
+
+  /** Under `homogenizedView`, normalize `tp` to a pickling-stable form:
+   *  package `this`-types become term refs, instantiated type variables,
+   *  skolems and lazy refs are dereferenced, And/Or types are rebuilt from
+   *  homogenized operands, and applied type constructors are dealiased.
+   *  Otherwise `tp` is returned unchanged.
+   */
+  def homogenize(tp: Type): Type =
+    if (homogenizedView)
+      tp match {
+        case tp: ThisType if tp.cls.is(Package) && !tp.cls.isEffectiveRoot =>
+          ctx.requiredPackage(tp.cls.fullName).termRef
+        case tp: TypeVar if tp.isInstantiated =>
+          homogenize(tp.instanceOpt)
+        case AndType(tp1, tp2) =>
+          homogenize(tp1) & homogenize(tp2)
+        case OrType(tp1, tp2) =>
+          homogenize(tp1) | homogenize(tp2)
+        case tp: SkolemType =>
+          homogenize(tp.info)
+        case tp: LazyRef =>
+          homogenize(tp.ref)
+        case HKApply(tycon, args) =>
+          tycon.dealias.appliedTo(args)
+        case _ =>
+          tp
+      }
+    else tp
+
+ private def selfRecName(n: Int) = s"z$n"
+
+ /** Render elements alternating with `sep` string */
+ protected def toText(elems: Traversable[Showable], sep: String) =
+ Text(elems map (_ toText this), sep)
+
+ /** Render element within highest precedence */
+ protected def toTextLocal(elem: Showable): Text =
+ atPrec(DotPrec) { elem.toText(this) }
+
+ /** Render element within lowest precedence */
+ protected def toTextGlobal(elem: Showable): Text =
+ atPrec(GlobalPrec) { elem.toText(this) }
+
+ protected def toTextLocal(elems: Traversable[Showable], sep: String) =
+ atPrec(DotPrec) { toText(elems, sep) }
+
+ protected def toTextGlobal(elems: Traversable[Showable], sep: String) =
+ atPrec(GlobalPrec) { toText(elems, sep) }
+
+ /** If the name of the symbol's owner should be used when you care about
+ * seeing an interesting name: in such cases this symbol is e.g. a method
+ * parameter with a synthetic name, a constructor named "this", an object
+ * "package", etc. The kind string, if non-empty, will be phrased relative
+ * to the name of the owner.
+ */
+ protected def hasMeaninglessName(sym: Symbol) = (
+ (sym is Param) && sym.owner.isSetter // x$1
+ || sym.isClassConstructor // this
+ || (sym.name == nme.PACKAGE) // package
+ )
+
+ def nameString(name: Name): String = name.toString + {
+ if (ctx.settings.debugNames.value)
+ if (name.isTypeName) "/T" else "/V"
+ else ""
+ }
+
+ def toText(name: Name): Text = Str(nameString(name))
+
+ /** String representation of a name used in a refinement
+ * In refined printing this undoes type parameter expansion
+ */
+ protected def refinementNameString(tp: RefinedType) = nameString(tp.refinedName)
+
+ /** String representation of a refinement */
+ protected def toTextRefinement(rt: RefinedType) =
+ (refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close
+
+ protected def argText(arg: Type): Text = arg match {
+ case arg: TypeBounds => "_" ~ toTextGlobal(arg)
+ case _ => toTextGlobal(arg)
+ }
+
+ /** The longest sequence of refinement types, starting at given type
+ * and following parents.
+ */
+ private def refinementChain(tp: Type): List[Type] =
+ tp :: (tp match {
+ case tp: RefinedType => refinementChain(tp.parent.stripTypeVar)
+ case _ => Nil
+ })
+
+ def toText(tp: Type): Text = controlled {
+ homogenize(tp) match {
+ case tp: TypeType =>
+ toTextRHS(tp)
+ case tp: TermRef
+ if !tp.denotationIsCurrent && !homogenizedView || // always print underyling when testing picklers
+ tp.symbol.is(Module) ||
+ tp.symbol.name.isImportName =>
+ toTextRef(tp) ~ ".type"
+ case tp: TermRef if tp.denot.isOverloaded =>
+ "<overloaded " ~ toTextRef(tp) ~ ">"
+ case tp: SingletonType =>
+ toTextLocal(tp.underlying) ~ "(" ~ toTextRef(tp) ~ ")"
+ case tp: TypeRef =>
+ toTextPrefix(tp.prefix) ~ selectionString(tp)
+ case tp: RefinedType =>
+ val parent :: (refined: List[RefinedType @unchecked]) =
+ refinementChain(tp).reverse
+ toTextLocal(parent) ~ "{" ~ Text(refined map toTextRefinement, "; ").close ~ "}"
+ case tp: RecType =>
+ try {
+ openRecs = tp :: openRecs
+ "{" ~ selfRecName(openRecs.length) ~ " => " ~ toTextGlobal(tp.parent) ~ "}"
+ }
+ finally openRecs = openRecs.tail
+ case AndType(tp1, tp2) =>
+ changePrec(AndPrec) { toText(tp1) ~ " & " ~ toText(tp2) }
+ case OrType(tp1, tp2) =>
+ changePrec(OrPrec) { toText(tp1) ~ " | " ~ toText(tp2) }
+ case ErrorType =>
+ "<error>"
+ case tp: WildcardType =>
+ if (tp.optBounds.exists) "(?" ~ toTextRHS(tp.bounds) ~ ")" else "?"
+ case NoType =>
+ "<notype>"
+ case NoPrefix =>
+ "<noprefix>"
+ case tp: MethodType =>
+ def paramText(name: TermName, tp: Type) = toText(name) ~ ": " ~ toText(tp)
+ changePrec(GlobalPrec) {
+ (if (tp.isImplicit) "(implicit " else "(") ~
+ Text((tp.paramNames, tp.paramTypes).zipped map paramText, ", ") ~
+ ")" ~ toText(tp.resultType)
+ }
+ case tp: ExprType =>
+ changePrec(GlobalPrec) { "=> " ~ toText(tp.resultType) }
+ case tp: PolyType =>
+ def paramText(variance: Int, name: Name, bounds: TypeBounds): Text =
+ varianceString(variance) ~ name.toString ~ toText(bounds)
+ changePrec(GlobalPrec) {
+ "[" ~ Text((tp.variances, tp.paramNames, tp.paramBounds).zipped.map(paramText), ", ") ~
+ "] => " ~ toTextGlobal(tp.resultType)
+ }
+ case tp: PolyParam =>
+ polyParamNameString(tp) ~ polyHash(tp.binder)
+ case AnnotatedType(tpe, annot) =>
+ toTextLocal(tpe) ~ " " ~ toText(annot)
+ case HKApply(tycon, args) =>
+ toTextLocal(tycon) ~ "[" ~ Text(args.map(argText), ", ") ~ "]"
+ case tp: TypeVar =>
+ if (tp.isInstantiated)
+ toTextLocal(tp.instanceOpt) ~ "^" // debug for now, so that we can see where the TypeVars are.
+ else {
+ val constr = ctx.typerState.constraint
+ val bounds =
+ if (constr.contains(tp)) constr.fullBounds(tp.origin)(ctx.addMode(Mode.Printing))
+ else TypeBounds.empty
+ if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")"
+ else toText(tp.origin)
+ }
+ case tp: LazyRef =>
+ "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")" // TODO: only print this during debug mode?
+ case _ =>
+ tp.fallbackToText(this)
+ }
+ }.close
+
+ protected def polyParamNameString(name: TypeName): String = name.toString
+
+ protected def polyParamNameString(param: PolyParam): String = polyParamNameString(param.binder.paramNames(param.paramNum))
+
+ /** The name of the symbol without a unique id. Under refined printing,
+ * the decoded original name.
+ */
+ protected def simpleNameString(sym: Symbol): String = nameString(sym.name)
+
+ /** If -uniqid is set, the hashcode of the polytype, after a # */
+ protected def polyHash(pt: PolyType): Text =
+ if (ctx.settings.uniqid.value) "#" + pt.hashCode else ""
+
+ /** If -uniqid is set, the unique id of symbol, after a # */
+ protected def idString(sym: Symbol): String =
+ if (ctx.settings.uniqid.value) "#" + sym.id else ""
+
+ def nameString(sym: Symbol): String =
+ simpleNameString(sym) + idString(sym) // + "<" + (if (sym.exists) sym.owner else "") + ">"
+
+ def fullNameString(sym: Symbol): String =
+ if (sym.isRoot || sym == NoSymbol || sym.owner.isEffectiveRoot)
+ nameString(sym)
+ else
+ fullNameString(fullNameOwner(sym)) + "." + nameString(sym)
+
+ protected def fullNameOwner(sym: Symbol): Symbol = sym.effectiveOwner.enclosingClass
+
+ protected def objectPrefix = "object "
+ protected def packagePrefix = "package "
+
+ protected def trimPrefix(text: Text) =
+ text.stripPrefix(objectPrefix).stripPrefix(packagePrefix)
+
+ protected def selectionString(tp: NamedType) = {
+ val sym = if (homogenizedView) tp.symbol else tp.currentSymbol
+ if (sym.exists) nameString(sym) else nameString(tp.name)
+ }
+
+ /** The string representation of this type used as a prefix */
+ protected def toTextRef(tp: SingletonType): Text = controlled {
+ tp match {
+ case tp: TermRef =>
+ toTextPrefix(tp.prefix) ~ selectionString(tp)
+ case tp: ThisType =>
+ nameString(tp.cls) + ".this"
+ case SuperType(thistpe: SingletonType, _) =>
+ toTextRef(thistpe).map(_.replaceAll("""\bthis$""", "super"))
+ case SuperType(thistpe, _) =>
+ "Super(" ~ toTextGlobal(thistpe) ~ ")"
+ case tp @ ConstantType(value) =>
+ toText(value)
+ case MethodParam(mt, idx) =>
+ nameString(mt.paramNames(idx))
+ case tp: RecThis =>
+ val idx = openRecs.reverse.indexOf(tp.binder)
+ if (idx >= 0) selfRecName(idx + 1)
+ else "{...}.this" // TODO move underlying type to an addendum, e.g. ... z3 ... where z3: ...
+ case tp: SkolemType =>
+ if (homogenizedView) toText(tp.info) else tp.repr
+ }
+ }
+
+ /** The string representation of this type used as a prefix */
+ protected def toTextPrefix(tp: Type): Text = controlled {
+ homogenize(tp) match {
+ case NoPrefix => ""
+ case tp: SingletonType => toTextRef(tp) ~ "."
+ case tp => trimPrefix(toTextLocal(tp)) ~ "#"
+ }
+ }
+
+ protected def isOmittablePrefix(sym: Symbol): Boolean =
+ defn.UnqualifiedOwnerTypes.exists(_.symbol == sym) || isEmptyPrefix(sym)
+
+ protected def isEmptyPrefix(sym: Symbol): Boolean =
+ sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName
+
+ /** String representation of a definition's type following its name,
+ * if symbol is completed, "?" otherwise.
+ */
+ protected def toTextRHS(optType: Option[Type]): Text = optType match {
+ case Some(tp) => toTextRHS(tp)
+ case None => "?"
+ }
+
+ /** String representation of a definition's type following its name */
+ protected def toTextRHS(tp: Type): Text = controlled {
+ homogenize(tp) match {
+ case tp @ TypeBounds(lo, hi) =>
+ if (lo eq hi) {
+ val eql =
+ if (tp.variance == 1) " =+ "
+ else if (tp.variance == -1) " =- "
+ else " = "
+ eql ~ toText(lo)
+ }
+ else
+ (if (lo isRef defn.NothingClass) Text() else " >: " ~ toText(lo)) ~
+ (if (hi isRef defn.AnyClass) Text() else " <: " ~ toText(hi))
+ case tp @ ClassInfo(pre, cls, cparents, decls, selfInfo) =>
+ val preText = toTextLocal(pre)
+ val (tparams, otherDecls) = decls.toList partition treatAsTypeParam
+ val tparamsText =
+ if (tparams.isEmpty) Text() else ("[" ~ dclsText(tparams) ~ "]").close
+ val selfText: Text = selfInfo match {
+ case NoType => Text()
+ case sym: Symbol if !sym.isCompleted => "this: ? =>"
+ case _ => "this: " ~ atPrec(InfixPrec) { toText(tp.selfType) } ~ " =>"
+ }
+ val trueDecls = otherDecls.filterNot(treatAsTypeArg)
+ val declsText =
+ if (trueDecls.isEmpty || !ctx.settings.debug.value) Text()
+ else dclsText(trueDecls)
+ tparamsText ~ " extends " ~ toTextParents(tp.parents) ~ "{" ~ selfText ~ declsText ~
+ "} at " ~ preText
+ case tp =>
+ ": " ~ toTextGlobal(tp)
+ }
+ }
+
+ protected def toTextParents(parents: List[Type]): Text = Text(parents.map(toTextLocal), " with ")
+
+ protected def treatAsTypeParam(sym: Symbol): Boolean = false
+ protected def treatAsTypeArg(sym: Symbol): Boolean = false
+
+ /** String representation of symbol's kind. */
+ def kindString(sym: Symbol): String = {
+ val flags = sym.flagsUNSAFE
+ if (flags is PackageClass) "package class"
+ else if (flags is PackageVal) "package"
+ else if (sym.isPackageObject)
+ if (sym.isClass) "package object class"
+ else "package object"
+ else if (sym.isAnonymousClass) "anonymous class"
+ else if (flags is ModuleClass) "module class"
+ else if (flags is ModuleVal) "module"
+ else if (flags is ImplClass) "implementation class"
+ else if (flags is Trait) "trait"
+ else if (sym.isClass) "class"
+ else if (sym.isType) "type"
+ else if (sym.isGetter) "getter"
+ else if (sym.isSetter) "setter"
+ else if (flags is Lazy) "lazy value"
+ else if (flags is Mutable) "variable"
+ else if (sym.isClassConstructor && sym.isPrimaryConstructor) "primary constructor"
+ else if (sym.isClassConstructor) "constructor"
+ else if (sym.is(Method)) "method"
+ else if (sym.isTerm) "value"
+ else ""
+ }
+
+ /** String representation of symbol's definition key word */
+ protected def keyString(sym: Symbol): String = {
+ val flags = sym.flagsUNSAFE
+ if (flags is JavaTrait) "interface"
+ else if ((flags is Trait) && !(flags is ImplClass)) "trait"
+ else if (sym.isClass) "class"
+ else if (sym.isType) "type"
+ else if (flags is Mutable) "var"
+ else if (flags is Package) "package"
+ else if (flags is Module) "object"
+ else if (sym is Method) "def"
+ else if (sym.isTerm && (!(flags is Param))) "val"
+ else ""
+ }
+
+ /** String representation of symbol's flags */
+ protected def toTextFlags(sym: Symbol): Text =
+ Text(sym.flagsUNSAFE.flagStrings map stringToText, " ")
+
+ /** String representation of symbol's variance or "" if not applicable */
+ protected def varianceString(sym: Symbol): String = varianceString(sym.variance)
+
+ protected def varianceString(v: Int): String = v match {
+ case -1 => "-"
+ case 1 => "+"
+ case _ => ""
+ }
+
+ def annotsText(sym: Symbol): Text = Text(sym.annotations.map(toText))
+
+ def dclText(sym: Symbol): Text = dclTextWithInfo(sym, sym.unforcedInfo)
+
+ def dclText(d: SingleDenotation): Text = dclTextWithInfo(d.symbol, Some(d.info))
+
+ private def dclTextWithInfo(sym: Symbol, info: Option[Type]): Text =
+ (toTextFlags(sym) ~~ keyString(sym) ~~
+ (varianceString(sym) ~ nameString(sym)) ~ toTextRHS(info)).close
+
+ def toText(sym: Symbol): Text =
+ (kindString(sym) ~~ {
+ if (sym.isAnonymousClass) toText(sym.info.parents, " with ") ~ "{...}"
+ else if (hasMeaninglessName(sym)) simpleNameString(sym.owner) + idString(sym)
+ else nameString(sym)
+ }).close
+
+ def locationText(sym: Symbol): Text =
+ if (!sym.exists) ""
+ else {
+ val ownr = sym.effectiveOwner
+ if (ownr.isClass && !isEmptyPrefix(ownr)) " in " ~ toText(ownr) else Text()
+ }
+
+ def locatedText(sym: Symbol): Text =
+ (toText(sym) ~ locationText(sym)).close
+
+ def extendedLocationText(sym: Symbol): Text =
+ if (!sym.exists) ""
+ else {
+ def recur(ownr: Symbol, innerLocation: String): Text = {
+ def nextOuter(innerKind: String): Text =
+ recur(ownr.effectiveOwner,
+ if (!innerLocation.isEmpty) innerLocation
+ else s" in an anonymous $innerKind")
+ def showLocation(ownr: Symbol, where: String): Text =
+ innerLocation ~ " " ~ where ~ " " ~ toText(ownr)
+ if (ownr.isAnonymousClass) nextOuter("class")
+ else if (ownr.isAnonymousFunction) nextOuter("function")
+ else if (isEmptyPrefix(ownr)) ""
+ else if (ownr.isLocalDummy) showLocation(ownr.owner, "locally defined in")
+ else if (ownr.isTerm && !ownr.is(Module | Method)) showLocation(ownr, "in the initalizer of")
+ else showLocation(ownr, "in")
+ }
+ recur(sym.owner, "")
+ }
+
+ def toText(denot: Denotation): Text = toText(denot.symbol) ~ "/D"
+
+  /** Escape `ch` for display inside a quoted literal: the usual special
+   *  characters get their backslash forms, other control characters become
+   *  octal escapes, and everything else is rendered verbatim.
+   *  Fix: `@switch` annotates the match scrutinee, not the def — on the def
+   *  it had no effect.
+   */
+  private def escapedChar(ch: Char): String = (ch: @switch) match {
+    case '\b' => "\\b"
+    case '\t' => "\\t"
+    case '\n' => "\\n"
+    case '\f' => "\\f"
+    case '\r' => "\\r"
+    case '"' => "\\\""
+    case '\'' => "\\\'"
+    case '\\' => "\\\\"
+    case _ => if (ch.isControl) "\\0" + toOctalString(ch) else String.valueOf(ch)
+  }
+
+  /** Literal syntax for a constant: quoted/escaped strings and chars,
+   *  `classOf[...]` for class tags, an `L` suffix for longs, the enum
+   *  value's name for enums, and plain `toString` otherwise.
+   */
+  def toText(const: Constant): Text = const.tag match {
+    case StringTag => "\"" + escapedString(const.value.toString) + "\""
+    case ClazzTag => "classOf[" ~ toText(const.typeValue.classSymbol) ~ "]"
+    case CharTag => s"'${escapedChar(const.charValue)}'"
+    case LongTag => const.longValue.toString + "L"
+    case EnumTag => const.symbolValue.name.toString
+    case _ => String.valueOf(const.value)
+  }
+
+ def toText(annot: Annotation): Text = s"@${annot.symbol.name}" // for now
+
+ protected def escapedString(str: String): String = str flatMap escapedChar
+
+ def dclsText(syms: List[Symbol], sep: String): Text = Text(syms map dclText, sep)
+
+ def toText(sc: Scope): Text =
+ ("Scope{" ~ dclsText(sc.toList) ~ "}").close
+
+ def toText[T >: Untyped](tree: Tree[T]): Text = {
+ tree match {
+ case node: Positioned =>
+ def toTextElem(elem: Any): Text = elem match {
+ case elem: Showable => elem.toText(this)
+ case elem: List[_] => "List(" ~ Text(elem map toTextElem, ",") ~ ")"
+ case elem => elem.toString
+ }
+ val nodeName = node.productPrefix
+ val elems =
+ Text(node.productIterator.map(toTextElem).toList, ", ")
+ val tpSuffix =
+ if (ctx.settings.printtypes.value && tree.hasType)
+ " | " ~ toText(tree.typeOpt)
+ else
+ Text()
+
+ nodeName ~ "(" ~ elems ~ tpSuffix ~ ")" ~ node.pos.toString
+ case _ =>
+ tree.fallbackToText(this)
+ }
+ }.close // todo: override in refined printer
+
+  // current summarization limit; Int.MaxValue means "no summarization"
+  private var maxSummarized = Int.MaxValue
+
+  /** Perform `op`, summarizing any printing to at most `depth` recursion
+   *  levels beyond the current one, and restore the previous summarization
+   *  limit afterwards.
+   *  Fix: the finally block restored `depth` instead of the saved limit, so
+   *  after one summarized call all subsequent printing stayed summarized.
+   */
+  def summarized[T](depth: Int)(op: => T): T = {
+    val saved = maxSummarized
+    maxSummarized = ctx.toTextRecursions + depth
+    try op
+    finally maxSummarized = saved
+  }
+
+  /** Perform `op` summarized to the default `summarizeDepth`. */
+  def summarized[T](op: => T): T = summarized(summarizeDepth)(op)
+
+ def plain = this
+}
+
diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala
new file mode 100644
index 000000000..14b63012e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala
@@ -0,0 +1,105 @@
+package dotty.tools.dotc
+package printing
+
+import core._
+import Texts._, ast.Trees._
+import Types.Type, Symbols.Symbol, Contexts.Context, Scopes.Scope, Constants.Constant,
+ Names.Name, Denotations._, Annotations.Annotation
+
+/** The base class of all printers
+ */
+abstract class Printer {
+
+ private[this] var prec: Precedence = GlobalPrec
+
+ /** The current precedence level */
+ def currentPrecedence = prec
+
+ /** Generate text using `op`, assuming a given precedence level `prec`. */
+ def atPrec(prec: Precedence)(op: => Text): Text = {
+ val outerPrec = this.prec
+ this.prec = prec
+ try op
+ finally this.prec = outerPrec
+ }
+
+ /** Generate text using `op`, assuming a given precedence level `prec`.
+ * If new level `prec` is lower than previous level, put text in parentheses.
+ */
+ def changePrec(prec: Precedence)(op: => Text): Text =
+ if (prec < this.prec) atPrec(prec) ("(" ~ op ~ ")") else atPrec(prec)(op)
+
+  /** The name, possibly with a namespace suffix if debugNames is set:
+ * /L for local names, /V for other term names, /T for type names
+ */
+ def nameString(name: Name): String
+
+ /** The name of the given symbol.
+ * If !settings.debug, the original name where
+ * expansions of operators are translated back to operator symbol.
+ * E.g. $eq => =.
+ * If settings.uniqid, adds id.
+ */
+ def nameString(sym: Symbol): String
+
+ /** The fully qualified name of the symbol */
+ def fullNameString(sym: Symbol): String
+
+ /** The kind of the symbol */
+ def kindString(sym: Symbol): String
+
+ /** The name as a text */
+ def toText(name: Name): Text
+
+ /** Textual representation, including symbol's kind e.g., "class Foo", "method Bar".
+ * If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
+ */
+ def toText(sym: Symbol): Text
+
+ /** Textual representation of symbol's declaration */
+ def dclText(sym: Symbol): Text
+
+ /** Textual representation of single denotation's declaration */
+ def dclText(sd: SingleDenotation): Text
+
+ /** If symbol's owner is a printable class C, the text "in C", otherwise "" */
+ def locationText(sym: Symbol): Text
+
+ /** Textual representation of symbol and its location */
+ def locatedText(sym: Symbol): Text
+
+ /** A description of sym's location */
+ def extendedLocationText(sym: Symbol): Text
+
+ /** Textual representation of denotation */
+ def toText(denot: Denotation): Text
+
+ /** Textual representation of constant */
+ def toText(const: Constant): Text
+
+ /** Textual representation of annotation */
+ def toText(annot: Annotation): Text
+
+ /** Textual representation of type */
+ def toText(tp: Type): Text
+
+ /** Textual representation of all symbols in given list,
+ * using `dclText` for displaying each.
+ */
+ def dclsText(syms: List[Symbol], sep: String = "\n"): Text
+
+ /** Textual representation of all definitions in a scope using `dclText` for each */
+ def toText(sc: Scope): Text
+
+ /** Textual representation of tree */
+ def toText[T >: Untyped](tree: Tree[T]): Text
+
+ /** Perform string or text-producing operation `op` so that only a
+ * summarized text with given recursion depth is shown
+ */
+ def summarized[T](depth: Int)(op: => T): T
+
+ /** A plain printer without any embellishments */
+ def plain: Printer
+}
+
diff --git a/compiler/src/dotty/tools/dotc/printing/Printers.scala b/compiler/src/dotty/tools/dotc/printing/Printers.scala
new file mode 100644
index 000000000..36043a4ff
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/Printers.scala
@@ -0,0 +1,14 @@
+package dotty.tools.dotc
+package printing
+
+import core.Contexts.Context
+
+trait Printers { this: Context =>
+
+ /** A function creating a printer */
+ def printer = {
+ val pr = printerFn(this)
+ if (this.settings.YplainPrinter.value) pr.plain else pr
+ }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
new file mode 100644
index 000000000..29e1d4869
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -0,0 +1,652 @@
+package dotty.tools.dotc
+package printing
+
+import core._
+import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._
+import TypeErasure.ErasedValueType
+import Contexts.Context, Scopes.Scope, Denotations._, SymDenotations._, Annotations.Annotation
+import StdNames.{nme, tpnme}
+import ast.{Trees, untpd, tpd}
+import typer.{Namer, Inliner}
+import typer.ProtoTypes.{SelectionProto, ViewProto, FunProto, IgnoredProto, dummyTreeOfType}
+import Trees._
+import TypeApplications._
+import Decorators._
+import config.Config
+import scala.annotation.switch
+import language.implicitConversions
+
+class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
+
+  /** A stack of enclosing DefDef, TypeDef, ClassDef, or ModuleDef nodes */
+ private var enclosingDef: untpd.Tree = untpd.EmptyTree
+ private var myCtx: Context = _ctx
+ private var printPos = ctx.settings.Yprintpos.value
+ override protected[this] implicit def ctx: Context = myCtx
+
+ def withEnclosingDef(enclDef: Tree[_ >: Untyped])(op: => Text): Text = {
+ val savedCtx = myCtx
+ if (enclDef.hasType && enclDef.symbol.exists)
+ myCtx = ctx.withOwner(enclDef.symbol)
+ val savedDef = enclosingDef
+ enclosingDef = enclDef
+ try op finally {
+ myCtx = savedCtx
+ enclosingDef = savedDef
+ }
+ }
+
+ def inPattern(op: => Text): Text = {
+ val savedCtx = myCtx
+ myCtx = ctx.addMode(Mode.Pattern)
+ try op finally myCtx = savedCtx
+ }
+
+ def withoutPos(op: => Text): Text = {
+ val savedPrintPos = printPos
+ printPos = false
+ try op finally printPos = savedPrintPos
+ }
+
+ private def enclDefIsClass = enclosingDef match {
+ case owner: TypeDef[_] => owner.isClassDef
+ case owner: untpd.ModuleDef => true
+ case _ => false
+ }
+
+ override protected def recursionLimitExceeded() = {}
+
+ protected val PrintableFlags = (SourceModifierFlags | Label | Module | Local).toCommonFlags
+
+ override def nameString(name: Name): String = name.decode.toString
+
+ override protected def simpleNameString(sym: Symbol): String = {
+ val name = sym.originalName
+ nameString(if (sym is ExpandedTypeParam) name.asTypeName.unexpandedName else name)
+ }
+
+ override def fullNameString(sym: Symbol): String =
+ if (isEmptyPrefix(sym.maybeOwner)) nameString(sym)
+ else super.fullNameString(sym)
+
+ override protected def fullNameOwner(sym: Symbol) = {
+ val owner = super.fullNameOwner(sym)
+ if (owner is ModuleClass) owner.sourceModule else owner
+ }
+
+ override def toTextRef(tp: SingletonType): Text = controlled {
+ tp match {
+ case tp: ThisType =>
+ if (tp.cls.isAnonymousClass) return "this"
+ if (tp.cls is ModuleClass) return fullNameString(tp.cls.sourceModule)
+ case _ =>
+ }
+ super.toTextRef(tp)
+ }
+
+ override def toTextPrefix(tp: Type): Text = controlled {
+ def isOmittable(sym: Symbol) =
+ if (ctx.settings.verbose.value) false
+ else if (homogenizedView) isEmptyPrefix(sym) // drop <root> and anonymous classes, but not scala, Predef.
+ else isOmittablePrefix(sym)
+ tp match {
+ case tp: ThisType =>
+ if (isOmittable(tp.cls)) return ""
+ case tp @ TermRef(pre, _) =>
+ val sym = tp.symbol
+ if (sym.isPackageObject) return toTextPrefix(pre)
+ if (isOmittable(sym)) return ""
+ case _ =>
+ }
+ super.toTextPrefix(tp)
+ }
+
+ override protected def refinementNameString(tp: RefinedType): String =
+ if (tp.parent.isInstanceOf[WildcardType] || tp.refinedName == nme.WILDCARD)
+ super.refinementNameString(tp)
+ else {
+ val tsym = tp.parent.member(tp.refinedName).symbol
+ if (!tsym.exists) super.refinementNameString(tp)
+ else simpleNameString(tsym)
+ }
+
+ override def toText(tp: Type): Text = controlled {
+ def toTextTuple(args: List[Type]): Text =
+ "(" ~ Text(args.map(argText), ", ") ~ ")"
+ def toTextFunction(args: List[Type]): Text =
+ changePrec(GlobalPrec) {
+ val argStr: Text =
+ if (args.length == 2 && !defn.isTupleType(args.head))
+ atPrec(InfixPrec) { argText(args.head) }
+ else
+ toTextTuple(args.init)
+ argStr ~ " => " ~ argText(args.last)
+ }
+ homogenize(tp) match {
+ case AppliedType(tycon, args) =>
+ val cls = tycon.typeSymbol
+ if (tycon.isRepeatedParam) return toTextLocal(args.head) ~ "*"
+ if (defn.isFunctionClass(cls)) return toTextFunction(args)
+ if (defn.isTupleClass(cls)) return toTextTuple(args)
+ return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close
+ case tp: TypeRef =>
+ val hideType = tp.symbol is AliasPreferred
+ if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) {
+ tp.info match {
+ case TypeAlias(alias) => return toText(alias)
+ case _ => if (tp.prefix.isInstanceOf[ThisType]) return nameString(tp.symbol)
+ }
+ }
+ else if (tp.symbol.isAnonymousClass && !ctx.settings.uniqid.value)
+ return toText(tp.info)
+ case ExprType(result) =>
+ return "=> " ~ toText(result)
+ case ErasedValueType(tycon, underlying) =>
+ return "ErasedValueType(" ~ toText(tycon) ~ ", " ~ toText(underlying) ~ ")"
+ case tp: ClassInfo =>
+ return toTextParents(tp.parentsWithArgs) ~ "{...}"
+ case JavaArrayType(elemtp) =>
+ return toText(elemtp) ~ "[]"
+ case tp: AnnotatedType if homogenizedView =>
+ // Positions of annotations in types are not serialized
+    // (they don't need to be serialized because we keep the original type tree with
+ // the original annotation anyway. Therefore, there will always be
+ // one version of the annotation tree that has the correct positions).
+ withoutPos(super.toText(tp))
+ case tp: SelectionProto =>
+ return "?{ " ~ toText(tp.name) ~ (" " provided !tp.name.decode.last.isLetterOrDigit) ~
+ ": " ~ toText(tp.memberProto) ~ " }"
+ case tp: ViewProto =>
+ return toText(tp.argType) ~ " ?=>? " ~ toText(tp.resultType)
+ case tp @ FunProto(args, resultType, _) =>
+ val argsText = args match {
+ case dummyTreeOfType(tp) :: Nil if !(tp isRef defn.NullClass) => "null: " ~ toText(tp)
+ case _ => toTextGlobal(args, ", ")
+ }
+ return "FunProto(" ~ argsText ~ "):" ~ toText(resultType)
+ case tp: IgnoredProto =>
+ return "?"
+ case _ =>
+ }
+ super.toText(tp)
+ }
+
+ def blockText[T >: Untyped](trees: List[Tree[T]]): Text =
+ ("{" ~ toText(trees, "\n") ~ "}").close
+
+ override def toText[T >: Untyped](tree: Tree[T]): Text = controlled {
+
+ import untpd.{modsDeco => _, _}
+
+ /** Print modifiers from symbols if tree has type, overriding the untpd behavior. */
+ implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDecorator =
+ new untpd.ModsDecorator {
+ def mods = if (mdef.hasType) Modifiers(mdef.symbol) else mdef.rawMods
+ }
+
+ def Modifiers(sym: Symbol)(implicit ctx: Context): Modifiers = untpd.Modifiers(
+ sym.flags & (if (sym.isType) ModifierFlags | VarianceFlags else ModifierFlags),
+ if (sym.privateWithin.exists) sym.privateWithin.asType.name else tpnme.EMPTY,
+ sym.annotations map (_.tree))
+
+ def isLocalThis(tree: Tree) = tree.typeOpt match {
+ case tp: ThisType => tp.cls == ctx.owner.enclosingClass
+ case _ => false
+ }
+
+ def optDotPrefix(tree: This) = optText(tree.qual)(_ ~ ".") provided !isLocalThis(tree)
+
+ def optAscription(tpt: untpd.Tree) = optText(tpt)(": " ~ _)
+ // Dotty deviation: called with an untpd.Tree, so cannot be a untpd.Tree[T] (seems to be a Scala2 problem to allow this)
+ // More deviations marked below as // DD
+
+ def tparamsText[T >: Untyped](params: List[Tree]): Text =
+ "[" ~ toText(params, ", ") ~ "]" provided params.nonEmpty
+
+ def addVparamssText(txt: Text, vparamss: List[List[ValDef]]): Text =
+ (txt /: vparamss)((txt, vparams) => txt ~ "(" ~ toText(vparams, ", ") ~ ")")
+
+ def caseBlockText(tree: Tree): Text = tree match {
+ case Block(stats, expr) => toText(stats :+ expr, "\n")
+ case expr => toText(expr)
+ }
+
+ def enumText(tree: untpd.Tree) = tree match { // DD
+ case _: untpd.GenFrom | _: untpd.GenAlias => toText(tree)
+ case _ => "if " ~ toText(tree)
+ }
+
+ def forText(enums: List[untpd.Tree], expr: untpd.Tree, sep: String): Text = // DD
+ changePrec(GlobalPrec) { "for " ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) }
+
+ def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD
+ case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt)
+ case untpd.Function(_, tpt) => " <% " ~ toText(tpt)
+ }
+
+ def constrText(tree: untpd.Tree): Text = toTextLocal(tree).stripPrefix("new ") // DD
+
+ def annotText(tree: untpd.Tree): Text = "@" ~ constrText(tree) // DD
+
+ def useSymbol =
+ tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value
+
+ def modText(mods: untpd.Modifiers, kw: String): Text = { // DD
+ val suppressKw = if (enclDefIsClass) mods is ParamAndLocal else mods is Param
+ var flagMask =
+ if (ctx.settings.debugFlags.value) AnyFlags
+ else if (suppressKw) PrintableFlags &~ Private
+ else PrintableFlags
+ if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= Implicit // drop implicit from classes
+ val flagsText = (mods.flags & flagMask).toString
+ Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (kw provided !suppressKw)
+ }
+
+ def varianceText(mods: untpd.Modifiers) =
+ if (mods is Covariant) "+"
+ else if (mods is Contravariant) "-"
+ else ""
+
+ def argText(arg: Tree): Text = arg match {
+ case arg: TypeBoundsTree => "_" ~ toTextGlobal(arg)
+ case arg: TypeTree =>
+ arg.typeOpt match {
+ case tp: TypeBounds => "_" ~ toTextGlobal(arg)
+ case _ => toTextGlobal(arg)
+ }
+ case _ => toTextGlobal(arg)
+ }
+
+ def dclTextOr(treeText: => Text) =
+ if (useSymbol)
+ annotsText(tree.symbol) ~~ dclText(tree.symbol) ~
+ ( " <in " ~ toText(tree.symbol.owner) ~ ">" provided ctx.settings.debugOwners.value)
+ else treeText
+
+ def idText(tree: untpd.Tree): Text = {
+ if (ctx.settings.uniqid.value && tree.hasType && tree.symbol.exists) s"#${tree.symbol.id}" else ""
+ }
+
+ def nameIdText(tree: untpd.NameTree): Text =
+ if (tree.hasType && tree.symbol.exists) nameString(tree.symbol)
+ else toText(tree.name) ~ idText(tree)
+
+ def toTextTemplate(impl: Template, ofNew: Boolean = false): Text = {
+ val Template(constr @ DefDef(_, tparams, vparamss, _, _), parents, self, _) = impl
+ val tparamsTxt = withEnclosingDef(constr) { tparamsText(tparams) }
+ val primaryConstrs = if (constr.rhs.isEmpty) Nil else constr :: Nil
+ val prefix: Text =
+ if (vparamss.isEmpty || primaryConstrs.nonEmpty) tparamsTxt
+ else {
+ var modsText = modText(constr.mods, "")
+ if (!modsText.isEmpty) modsText = " " ~ modsText
+ if (constr.mods.hasAnnotations && !constr.mods.hasFlags) modsText = modsText ~~ " this"
+ withEnclosingDef(constr) { addVparamssText(tparamsTxt ~~ modsText, vparamss) }
+ }
+ val parentsText = Text(parents map constrText, " with ")
+ val selfText = {
+ val selfName = if (self.name == nme.WILDCARD) "this" else self.name.toString
+ (selfName ~ optText(self.tpt)(": " ~ _) ~ " =>").close
+ } provided !self.isEmpty
+ val bodyText = "{" ~~ selfText ~~ toTextGlobal(primaryConstrs ::: impl.body, "\n") ~ "}"
+ prefix ~ (" extends" provided !ofNew) ~~ parentsText ~~ bodyText
+ }
+
+ def toTextPackageId(pid: Tree): Text =
+ if (homogenizedView && pid.hasType) toTextLocal(pid.tpe)
+ else toTextLocal(pid)
+
+ def toTextCore(tree: Tree): Text = tree match {
+ case id: Trees.BackquotedIdent[_] if !homogenizedView =>
+ "`" ~ toText(id.name) ~ "`"
+ case Ident(name) =>
+ tree.typeOpt match {
+ case tp: NamedType if name != nme.WILDCARD =>
+ val pre = if (tp.symbol is JavaStatic) tp.prefix.widen else tp.prefix
+ toTextPrefix(pre) ~ selectionString(tp)
+ case _ =>
+ toText(name)
+ }
+ case tree @ Select(qual, name) =>
+ if (qual.isType) toTextLocal(qual) ~ "#" ~ toText(name)
+ else toTextLocal(qual) ~ ("." ~ nameIdText(tree) provided name != nme.CONSTRUCTOR)
+ case tree: This =>
+ optDotPrefix(tree) ~ "this" ~ idText(tree)
+ case Super(qual: This, mix) =>
+ optDotPrefix(qual) ~ "super" ~ optText(mix)("[" ~ _ ~ "]")
+ case Apply(fun, args) =>
+ if (fun.hasType && fun.symbol == defn.throwMethod)
+ changePrec (GlobalPrec) {
+ "throw " ~ toText(args.head)
+ }
+ else
+ toTextLocal(fun) ~ "(" ~ toTextGlobal(args, ", ") ~ ")"
+ case TypeApply(fun, args) =>
+ toTextLocal(fun) ~ "[" ~ toTextGlobal(args, ", ") ~ "]"
+ case Literal(c) =>
+ tree.typeOpt match {
+ case ConstantType(tc) => toText(tc)
+ case _ => toText(c)
+ }
+ case New(tpt) =>
+ "new " ~ {
+ tpt match {
+ case tpt: Template => toTextTemplate(tpt, ofNew = true)
+ case _ =>
+ if (tpt.hasType)
+ toTextLocal(tpt.typeOpt.underlyingClassRef(refinementOK = false))
+ else
+ toTextLocal(tpt)
+ }
+ }
+ case Typed(expr, tpt) =>
+ changePrec(InfixPrec) { toText(expr) ~ ": " ~ toText(tpt) }
+ case NamedArg(name, arg) =>
+ toText(name) ~ " = " ~ toText(arg)
+ case Assign(lhs, rhs) =>
+ changePrec(GlobalPrec) { toTextLocal(lhs) ~ " = " ~ toText(rhs) }
+ case Block(stats, expr) =>
+ blockText(stats :+ expr)
+ case If(cond, thenp, elsep) =>
+ changePrec(GlobalPrec) {
+ "if " ~ toText(cond) ~ (" then" provided !cond.isInstanceOf[Parens]) ~~ toText(thenp) ~ optText(elsep)(" else " ~ _)
+ }
+ case Closure(env, ref, target) =>
+ "closure(" ~ (toTextGlobal(env, ", ") ~ " | " provided env.nonEmpty) ~
+ toTextGlobal(ref) ~ (":" ~ toText(target) provided !target.isEmpty) ~ ")"
+ case Match(sel, cases) =>
+ if (sel.isEmpty) blockText(cases)
+ else changePrec(GlobalPrec) { toText(sel) ~ " match " ~ blockText(cases) }
+ case CaseDef(pat, guard, body) =>
+ "case " ~ inPattern(toText(pat)) ~ optText(guard)(" if " ~ _) ~ " => " ~ caseBlockText(body)
+ case Return(expr, from) =>
+ changePrec(GlobalPrec) { "return" ~ optText(expr)(" " ~ _) }
+ case Try(expr, cases, finalizer) =>
+ changePrec(GlobalPrec) {
+ "try " ~ toText(expr) ~ optText(cases)(" catch " ~ _) ~ optText(finalizer)(" finally " ~ _)
+ }
+ case Throw(expr) =>
+ changePrec(GlobalPrec) {
+ "throw " ~ toText(expr)
+ }
+ case SeqLiteral(elems, elemtpt) =>
+ "[" ~ toTextGlobal(elems, ",") ~ " : " ~ toText(elemtpt) ~ "]"
+ case tree @ Inlined(call, bindings, body) =>
+ (("/* inlined from " ~ toText(call) ~ "*/ ") provided !homogenizedView) ~
+ blockText(bindings :+ body)
+ case tpt: untpd.DerivedTypeTree =>
+ "<derived typetree watching " ~ summarized(toText(tpt.watched)) ~ ">"
+ case TypeTree() =>
+ toText(tree.typeOpt)
+ case SingletonTypeTree(ref) =>
+ toTextLocal(ref) ~ ".type"
+ case AndTypeTree(l, r) =>
+ changePrec(AndPrec) { toText(l) ~ " & " ~ toText(r) }
+ case OrTypeTree(l, r) =>
+ changePrec(OrPrec) { toText(l) ~ " | " ~ toText(r) }
+ case RefinedTypeTree(tpt, refines) =>
+ toTextLocal(tpt) ~ " " ~ blockText(refines)
+ case AppliedTypeTree(tpt, args) =>
+ toTextLocal(tpt) ~ "[" ~ Text(args map argText, ", ") ~ "]"
+ case PolyTypeTree(tparams, body) =>
+ changePrec(GlobalPrec) {
+ tparamsText(tparams) ~ " -> " ~ toText(body)
+ }
+ case ByNameTypeTree(tpt) =>
+ "=> " ~ toTextLocal(tpt)
+ case TypeBoundsTree(lo, hi) =>
+ optText(lo)(" >: " ~ _) ~ optText(hi)(" <: " ~ _)
+ case Bind(name, body) =>
+ changePrec(InfixPrec) { toText(name) ~ " @ " ~ toText(body) }
+ case Alternative(trees) =>
+ changePrec(OrPrec) { toText(trees, " | ") }
+ case UnApply(fun, implicits, patterns) =>
+ val extractor = fun match {
+ case Select(extractor, nme.unapply) => extractor
+ case _ => fun
+ }
+ toTextLocal(extractor) ~
+ "(" ~ toTextGlobal(patterns, ", ") ~ ")" ~
+ ("(" ~ toTextGlobal(implicits, ", ") ~ ")" provided implicits.nonEmpty)
+ case tree @ ValDef(name, tpt, _) =>
+ dclTextOr {
+ modText(tree.mods, if (tree.mods is Mutable) "var" else "val") ~~
+ nameIdText(tree) ~ optAscription(tpt) ~
+ withEnclosingDef(tree) { optText(tree.rhs)(" = " ~ _) }
+ }
+ case tree @ DefDef(name, tparams, vparamss, tpt, _) =>
+ dclTextOr {
+ val prefix = modText(tree.mods, "def") ~~ nameIdText(tree)
+ withEnclosingDef(tree) {
+ addVparamssText(prefix ~ tparamsText(tparams), vparamss) ~ optAscription(tpt) ~
+ optText(tree.rhs)(" = " ~ _)
+ }
+ }
+ case tree @ TypeDef(name, rhs) =>
+ def typeDefText(tparamsText: => Text, rhsText: => Text) =
+ dclTextOr {
+ modText(tree.mods, "type") ~~ (varianceText(tree.mods) ~ nameIdText(tree)) ~
+ withEnclosingDef(tree) {
+ if (tree.hasType) toText(tree.symbol.info) // TODO: always print RHS, once we pickle/unpickle type trees
+ else tparamsText ~ rhsText
+ }
+ }
+ def recur(rhs: Tree, tparamsTxt: => Text): Text = rhs match {
+ case impl: Template =>
+ modText(tree.mods, if ((tree).mods is Trait) "trait" else "class") ~~
+ nameIdText(tree) ~ withEnclosingDef(tree) { toTextTemplate(impl) } ~
+ (if (tree.hasType && ctx.settings.verbose.value) i"[decls = ${tree.symbol.info.decls}]" else "")
+ case rhs: TypeBoundsTree =>
+ typeDefText(tparamsTxt, toText(rhs))
+ case PolyTypeTree(tparams, body) =>
+ recur(body, tparamsText(tparams))
+ case rhs =>
+ typeDefText(tparamsTxt, optText(rhs)(" = " ~ _))
+ }
+ recur(rhs, "")
+ case Import(expr, selectors) =>
+ def selectorText(sel: Tree): Text = sel match {
+ case Thicket(l :: r :: Nil) => toTextGlobal(l) ~ " => " ~ toTextGlobal(r)
+ case _ => toTextGlobal(sel)
+ }
+ val selectorsText: Text = selectors match {
+ case id :: Nil => toText(id)
+ case _ => "{" ~ Text(selectors map selectorText, ", ") ~ "}"
+ }
+ "import " ~ toTextLocal(expr) ~ "." ~ selectorsText
+ case PackageDef(pid, stats) =>
+ val statsText = stats match {
+ case (pdef: PackageDef) :: Nil => toText(pdef)
+ case _ => toTextGlobal(stats, "\n")
+ }
+ val bodyText =
+ if (currentPrecedence == TopLevelPrec) "\n" ~ statsText else " {" ~ statsText ~ "}"
+ "package " ~ toTextPackageId(pid) ~ bodyText
+ case tree: Template =>
+ toTextTemplate(tree)
+ case Annotated(arg, annot) =>
+ toTextLocal(arg) ~~ annotText(annot)
+ case EmptyTree =>
+ "<empty>"
+ case TypedSplice(t) =>
+ toText(t)
+ case tree @ ModuleDef(name, impl) =>
+ withEnclosingDef(tree) {
+ modText(tree.mods, "object") ~~ nameIdText(tree) ~ toTextTemplate(impl)
+ }
+ case SymbolLit(str) =>
+ "'" + str
+ case InterpolatedString(id, segments) =>
+ def strText(str: Literal) = Str(escapedString(str.const.stringValue))
+ def segmentText(segment: Tree) = segment match {
+ case Thicket(List(str: Literal, expr)) => strText(str) ~ "{" ~ toTextGlobal(expr) ~ "}"
+ case str: Literal => strText(str)
+ }
+ toText(id) ~ "\"" ~ Text(segments map segmentText, "") ~ "\""
+ case Function(args, body) =>
+ var implicitSeen: Boolean = false
+ def argToText(arg: Tree) = arg match {
+ case arg @ ValDef(name, tpt, _) =>
+ val implicitText =
+ if ((arg.mods is Implicit) && !implicitSeen) { implicitSeen = true; "implicit " }
+ else ""
+ implicitText ~ toText(name) ~ optAscription(tpt)
+ case _ =>
+ toText(arg)
+ }
+ val argsText = args match {
+ case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg)
+ case _ => "(" ~ Text(args map argToText, ", ") ~ ")"
+ }
+ changePrec(GlobalPrec) {
+ argsText ~ " => " ~ toText(body)
+ }
+ case InfixOp(l, op, r) =>
+ val opPrec = parsing.precedence(op)
+ changePrec(opPrec) { toText(l) ~ " " ~ toText(op) ~ " " ~ toText(r) }
+ case PostfixOp(l, op) =>
+ changePrec(InfixPrec) { toText(l) ~ " " ~ toText(op) }
+ case PrefixOp(op, r) =>
+ changePrec(DotPrec) { toText(op) ~ " " ~ toText(r) }
+ case Parens(t) =>
+ "(" ~ toTextGlobal(t) ~ ")"
+ case Tuple(ts) =>
+ "(" ~ toTextGlobal(ts, ", ") ~ ")"
+ case WhileDo(cond, body) =>
+ changePrec(GlobalPrec) { "while " ~ toText(cond) ~ " do " ~ toText(body) }
+ case DoWhile(cond, body) =>
+ changePrec(GlobalPrec) { "do " ~ toText(body) ~ " while " ~ toText(cond) }
+ case ForYield(enums, expr) =>
+ forText(enums, expr, " yield ")
+ case ForDo(enums, expr) =>
+ forText(enums, expr, " do ")
+ case GenFrom(pat, expr) =>
+ toText(pat) ~ " <- " ~ toText(expr)
+ case GenAlias(pat, expr) =>
+ toText(pat) ~ " = " ~ toText(expr)
+ case ContextBounds(bounds, cxBounds) =>
+ (toText(bounds) /: cxBounds) {(t, cxb) =>
+ t ~ cxBoundToText(cxb)
+ }
+ case PatDef(mods, pats, tpt, rhs) =>
+ modText(mods, "val") ~~ toText(pats, ", ") ~ optAscription(tpt) ~
+ optText(rhs)(" = " ~ _)
+ case ParsedTry(expr, handler, finalizer) =>
+ changePrec(GlobalPrec) {
+ "try " ~ toText(expr) ~ " catch {" ~ toText(handler) ~ "}" ~ optText(finalizer)(" finally " ~ _)
+ }
+ case Thicket(trees) =>
+ "Thicket {" ~~ toTextGlobal(trees, "\n") ~~ "}"
+ case _ =>
+ tree.fallbackToText(this)
+ }
+
+ var txt = toTextCore(tree)
+
+ def suppressTypes =
+ tree.isType || tree.isDef || // don't print types of types or defs
+ homogenizedView && ctx.mode.is(Mode.Pattern)
+ // When comparing pickled info, disregard types of patterns.
+ // The reason is that GADT matching can rewrite types of pattern trees
+ // without changing the trees themselves. (see Typer.typedCase.indexPatterns.transform).
+ // But then pickling and unpickling the original trees will yield trees
+ // with the original types before they are rewritten, which causes a discrepancy.
+
+ def suppressPositions = tree match {
+ case _: WithoutTypeOrPos[_] | _: TypeTree => true // TypeTrees never have an interesting position
+ case _ => false
+ }
+
+ if (ctx.settings.printtypes.value && tree.hasType) {
+ // add type to term nodes; replace type nodes with their types unless -Yprintpos is also set.
+ def tp = tree.typeOpt match {
+ case tp: TermRef if tree.isInstanceOf[RefTree] && !tp.denot.isOverloaded => tp.underlying
+ case tp => tp
+ }
+ if (!suppressTypes)
+ txt = ("<" ~ txt ~ ":" ~ toText(tp) ~ ">").close
+ else if (tree.isType && !homogenizedView)
+ txt = toText(tp)
+ }
+ if (printPos && !suppressPositions) {
+ // add positions
+ val pos =
+ if (homogenizedView && !tree.isInstanceOf[MemberDef]) tree.pos.toSynthetic
+ else tree.pos
+ val clsStr = ""//if (tree.isType) tree.getClass.toString else ""
+ txt = (txt ~ "@" ~ pos.toString ~ clsStr).close
+ }
+ tree match {
+ case Block(_, _) | Template(_, _, _, _) => txt
+ case _ => txt.close
+ }
+ }
+
+ def optText(name: Name)(encl: Text => Text): Text =
+ if (name.isEmpty) "" else encl(toText(name))
+
+ def optText[T >: Untyped](tree: Tree[T])(encl: Text => Text): Text =
+ if (tree.isEmpty) "" else encl(toText(tree))
+
+ def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text =
+ if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else ""
+
+ override protected def polyParamNameString(name: TypeName): String =
+ name.unexpandedName.toString
+
+ override protected def treatAsTypeParam(sym: Symbol): Boolean = sym is TypeParam
+
+ override protected def treatAsTypeArg(sym: Symbol) =
+ sym.isType && (sym is ProtectedLocal) &&
+ (sym.allOverriddenSymbols exists (_ is TypeParam))
+
+ override def toText(sym: Symbol): Text = {
+ if (sym.isImport) {
+ def importString(tree: untpd.Tree) = s"import ${tree.show}"
+ sym.infoOrCompleter match {
+ case info: Namer#Completer => return importString(info.original)
+ case info: ImportType => return importString(info.expr)
+ case _ =>
+ }
+ }
+ if (sym.is(ModuleClass))
+ kindString(sym) ~~ (nameString(sym.name.stripModuleClassSuffix) + idString(sym))
+ else
+ super.toText(sym)
+ }
+
+ override def kindString(sym: Symbol) = {
+ val flags = sym.flagsUNSAFE
+ if (flags is Package) "package"
+ else if (sym.isPackageObject) "package object"
+ else if (flags is Module) "object"
+ else if (flags is ImplClass) "class"
+ else if (sym.isClassConstructor) "constructor"
+ else super.kindString(sym)
+ }
+
+ override protected def keyString(sym: Symbol): String = {
+ val flags = sym.flagsUNSAFE
+ if (sym.isType && sym.owner.isTerm) ""
+ else super.keyString(sym)
+ }
+
+ override def toTextFlags(sym: Symbol) =
+ if (ctx.settings.debugFlags.value)
+ super.toTextFlags(sym)
+ else {
+ var flags = sym.flagsUNSAFE
+ if (flags is TypeParam) flags = flags &~ Protected
+ Text((flags & PrintableFlags).flagStrings map stringToText, " ")
+ }
+
+ override def toText(denot: Denotation): Text = denot match {
+ case denot: MultiDenotation => Text(denot.alternatives.map(dclText), " <and> ")
+ case NoDenotation => "NoDenotation"
+ case _ =>
+ if (denot.symbol.exists) toText(denot.symbol)
+ else "some " ~ toText(denot.info)
+ }
+
+ override def plain = new PlainPrinter(_ctx)
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/Showable.scala b/compiler/src/dotty/tools/dotc/printing/Showable.scala
new file mode 100644
index 000000000..efddb26f7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/Showable.scala
@@ -0,0 +1,34 @@
+package dotty.tools.dotc
+package printing
+
+import core._
+
+import Contexts._, Texts._, Decorators._
+import config.Config.summarizeDepth
+import scala.util.control.NonFatal
+
+trait Showable extends Any {
+
+ /** The text representation of this showable element.
+ * This normally dispatches to a pattern matching
+ * method in Printers.
+ */
+ def toText(printer: Printer): Text
+
+ /** A fallback text representation, if the pattern matching
+ * in Printers does not have a case for this showable element
+ */
+ def fallbackToText(printer: Printer): Text = toString
+
+ /** The string representation of this showable element. */
+ def show(implicit ctx: Context): String = toText(ctx.printer).show
+
+ /** The summarized string representation of this showable element.
+ * Recursion depth is limited to some smallish value. Default is
+ * Config.summarizeDepth.
+ */
+ def showSummary(depth: Int)(implicit ctx: Context): String =
+ ctx.printer.summarized(depth)(show)
+
+ def showSummary(implicit ctx: Context): String = showSummary(summarizeDepth)
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
new file mode 100644
index 000000000..86f34e64d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
@@ -0,0 +1,304 @@
+package dotty.tools
+package dotc
+package printing
+
+import parsing.Tokens._
+import scala.annotation.switch
+import scala.collection.mutable.StringBuilder
+import core.Contexts.Context
+import Highlighting.{Highlight, HighlightBuffer}
+
+/** This object provides functions for syntax highlighting in the REPL */
+object SyntaxHighlighting {
+
+ val NoColor = Console.RESET
+ val CommentColor = Console.BLUE
+ val KeywordColor = Console.YELLOW
+ val ValDefColor = Console.CYAN
+ val LiteralColor = Console.RED
+ val TypeColor = Console.MAGENTA
+ val AnnotationColor = Console.MAGENTA
+
+ private def none(str: String) = str
+ private def keyword(str: String) = KeywordColor + str + NoColor
+ private def typeDef(str: String) = TypeColor + str + NoColor
+ private def literal(str: String) = LiteralColor + str + NoColor
+ private def valDef(str: String) = ValDefColor + str + NoColor
+ private def operator(str: String) = TypeColor + str + NoColor
+ private def annotation(str: String) =
+ if (str.trim == "@") str else AnnotationColor + str + NoColor
+ private val tripleQs = Console.RED_B + "???" + NoColor
+
+ private val keywords: Seq[String] = for {
+ index <- IF to INLINE // All alpha keywords
+ } yield tokenString(index)
+
+ private val interpolationPrefixes =
+ 'A' :: 'B' :: 'C' :: 'D' :: 'E' :: 'F' :: 'G' :: 'H' :: 'I' :: 'J' :: 'K' ::
+ 'L' :: 'M' :: 'N' :: 'O' :: 'P' :: 'Q' :: 'R' :: 'S' :: 'T' :: 'U' :: 'V' ::
+ 'W' :: 'X' :: 'Y' :: 'Z' :: '$' :: '_' :: 'a' :: 'b' :: 'c' :: 'd' :: 'e' ::
+ 'f' :: 'g' :: 'h' :: 'i' :: 'j' :: 'k' :: 'l' :: 'm' :: 'n' :: 'o' :: 'p' ::
+ 'q' :: 'r' :: 's' :: 't' :: 'u' :: 'v' :: 'w' :: 'x' :: 'y' :: 'z' :: Nil
+
+ private val typeEnders =
+ '{' :: '}' :: ')' :: '(' :: '[' :: ']' :: '=' :: ' ' :: ',' :: '.' ::
+ '\n' :: Nil
+
+ def apply(chars: Iterable[Char]): Iterable[Char] = {
+ var prev: Char = 0
+ var remaining = chars.toStream
+ val newBuf = new StringBuilder
+ var lastToken = ""
+
+ @inline def keywordStart =
+ prev == 0 || prev == ' ' || prev == '{' || prev == '(' ||
+ prev == '\n' || prev == '[' || prev == ','
+
+ @inline def numberStart(c: Char) =
+ c.isDigit && (!prev.isLetter || prev == '.' || prev == ' ' || prev == '(' || prev == '\u0000')
+
+ def takeChar(): Char = takeChars(1).head
+ def takeChars(x: Int): Seq[Char] = {
+ val taken = remaining.take(x)
+ remaining = remaining.drop(x)
+ taken
+ }
+
+ while (remaining.nonEmpty) {
+ val n = takeChar()
+ if (interpolationPrefixes.contains(n)) {
+ // Interpolation prefixes are a superset of the keyword start chars
+ val next = remaining.take(3).mkString
+ if (next.startsWith("\"")) {
+ newBuf += n
+ prev = n
+ if (remaining.nonEmpty) takeChar() // drop 1 for appendLiteral
+ appendLiteral('"', next == "\"\"\"")
+ } else {
+ if (n.isUpper && keywordStart) {
+ appendWhile(n, !typeEnders.contains(_), typeDef)
+ } else if (keywordStart) {
+ append(n, keywords.contains(_), { kw =>
+ if (kw == "new") typeDef(kw) else keyword(kw)
+ })
+ } else {
+ newBuf += n
+ prev = n
+ }
+ }
+ } else {
+ (n: @switch) match {
+ case '/' =>
+ if (remaining.nonEmpty) {
+ remaining.head match {
+ case '/' =>
+ takeChar()
+ eolComment()
+ case '*' =>
+ takeChar()
+ blockComment()
+ case x =>
+ newBuf += '/'
+ }
+ } else newBuf += '/'
+ case '=' =>
+ append('=', _ == "=>", operator)
+ case '<' =>
+ append('<', { x => x == "<-" || x == "<:" || x == "<%" }, operator)
+ case '>' =>
+ append('>', { x => x == ">:" }, operator)
+ case '#' =>
+ if (prev != ' ' && prev != '.') newBuf append operator("#")
+ else newBuf += n
+ prev = '#'
+ case '@' =>
+ appendWhile('@', !typeEnders.contains(_), annotation)
+ case '\"' =>
+ appendLiteral('\"', multiline = remaining.take(2).mkString == "\"\"")
+ case '\'' =>
+ appendLiteral('\'')
+ case '`' =>
+ appendTo('`', _ == '`', none)
+ case _ => {
+ if (n == '?' && remaining.take(2).mkString == "??") {
+ takeChars(2)
+ newBuf append tripleQs
+ prev = '?'
+ } else if (n.isUpper && keywordStart)
+ appendWhile(n, !typeEnders.contains(_), typeDef)
+ else if (numberStart(n))
+ appendWhile(n, { x => x.isDigit || x == '.' || x == '\u0000'}, literal)
+ else
+ newBuf += n; prev = n
+ }
+ }
+ }
+ }
+
+ def eolComment() = {
+ newBuf append (CommentColor + "//")
+ var curr = '/'
+ while (curr != '\n' && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+ }
+ prev = curr
+ newBuf append NoColor
+ }
+
+ def blockComment() = {
+ newBuf append (CommentColor + "/*")
+ var curr = '*'
+ var open = 1
+ while (open > 0 && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+
+ if (curr == '*' && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+ if (curr == '/') open -= 1
+ } else if (curr == '/' && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+ if (curr == '*') open += 1
+ }
+ }
+ prev = curr
+ newBuf append NoColor
+ }
+
+ def appendLiteral(delim: Char, multiline: Boolean = false) = {
+ var curr: Char = 0
+ var continue = true
+ var closing = 0
+ val inInterpolation = interpolationPrefixes.contains(prev)
+ newBuf append (LiteralColor + delim)
+
+ def shouldInterpolate =
+ inInterpolation && curr == '$' && prev != '$' && remaining.nonEmpty
+
+ def interpolate() = {
+ val next = takeChar()
+ if (next == '$') {
+ newBuf += curr
+ newBuf += next
+ prev = '$'
+ } else if (next == '{') {
+ var open = 1 // keep track of open blocks
+ newBuf append (ValDefColor + curr)
+ newBuf += next
+ while (remaining.nonEmpty && open > 0) {
+ var c = takeChar()
+ newBuf += c
+ if (c == '}') open -= 1
+ else if (c == '{') open += 1
+ }
+ newBuf append LiteralColor
+ } else {
+ newBuf append (ValDefColor + curr)
+ newBuf += next
+ var c: Char = 'a'
+ while (c.isLetterOrDigit && remaining.nonEmpty) {
+ c = takeChar()
+ if (c != '"') newBuf += c
+ }
+ newBuf append LiteralColor
+ if (c == '"') {
+ newBuf += c
+ continue = false
+ }
+ }
+ closing = 0
+ }
+
+ while (continue && remaining.nonEmpty) {
+ curr = takeChar()
+ if (curr == '\\' && remaining.nonEmpty) {
+ val next = takeChar()
+ newBuf append (KeywordColor + curr)
+ if (next == 'u') {
+ val code = "u" + takeChars(4).mkString
+ newBuf append code
+ } else newBuf += next
+ newBuf append LiteralColor
+ closing = 0
+ } else if (shouldInterpolate) {
+ interpolate()
+ } else if (curr == delim && multiline) {
+ closing += 1
+ if (closing == 3) continue = false
+ newBuf += curr
+ } else if (curr == delim) {
+ continue = false
+ newBuf += curr
+ } else {
+ newBuf += curr
+ closing = 0
+ }
+ }
+ newBuf append NoColor
+ prev = curr
+ }
+
+ def append(c: Char, shouldHL: String => Boolean, highlight: String => String) = {
+ var curr: Char = 0
+ val sb = new StringBuilder(s"$c")
+
+ def delim(c: Char) = (c: @switch) match {
+ case ' ' => true
+ case '\n' => true
+ case '(' => true
+ case '[' => true
+ case ':' => true
+ case '@' => true
+ case _ => false
+ }
+
+ while (remaining.nonEmpty && !delim(curr)) {
+ curr = takeChar()
+ if (!delim(curr)) sb += curr
+ }
+
+ val str = sb.toString
+ val toAdd =
+ if (shouldHL(str))
+ highlight(str)
+ else if (("var" :: "val" :: "def" :: "case" :: Nil).contains(lastToken))
+ valDef(str)
+ else str
+ val suffix = if (delim(curr)) s"$curr" else ""
+ newBuf append (toAdd + suffix)
+ lastToken = str
+ prev = curr
+ }
+
+ def appendWhile(c: Char, pred: Char => Boolean, highlight: String => String) = {
+ var curr: Char = 0
+ val sb = new StringBuilder(s"$c")
+ while (remaining.nonEmpty && pred(curr)) {
+ curr = takeChar()
+ if (pred(curr)) sb += curr
+ }
+
+ val str = sb.toString
+ val suffix = if (!pred(curr)) s"$curr" else ""
+ newBuf append (highlight(str) + suffix)
+ prev = curr
+ }
+
+ def appendTo(c: Char, pred: Char => Boolean, highlight: String => String) = {
+ var curr: Char = 0
+ val sb = new StringBuilder(s"$c")
+ while (remaining.nonEmpty && !pred(curr)) {
+ curr = takeChar()
+ sb += curr
+ }
+
+ newBuf append highlight(sb.toString)
+ prev = curr
+ }
+
+ newBuf.toIterable
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala
new file mode 100644
index 000000000..db81cab7a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala
@@ -0,0 +1,168 @@
+package dotty.tools.dotc
+package printing
+import core.Contexts.Context
+import language.implicitConversions
+
+object Texts {
+
+ abstract class Text {
+
+ protected def indentMargin = 2
+
+ def relems: List[Text]
+
+ def isEmpty: Boolean = this match {
+ case Str(s) => s.isEmpty
+ case Fluid(relems) => relems forall (_.isEmpty)
+ case Vertical(relems) => relems.isEmpty
+ }
+
+ def isVertical = isInstanceOf[Vertical]
+ def isClosed = isVertical || isInstanceOf[Closed]
+ def isFluid = isInstanceOf[Fluid]
+ def isSplittable = isFluid && !isClosed
+
+ def close = new Closed(relems)
+
+ def remaining(width: Int): Int = this match {
+ case Str(s) =>
+ width - s.length
+ case Fluid(Nil) =>
+ width
+ case Fluid(last :: prevs) =>
+ val r = last remaining width
+ if (r < 0) r else Fluid(prevs) remaining r
+ case Vertical(_) =>
+ -1
+ }
+
+ def lastLine: String = this match {
+ case Str(s) => s
+ case _ => relems.head.lastLine
+ }
+
+ def appendToLastLine(that: Text): Text = that match {
+ case Str(s2) =>
+ this match {
+ case Str(s1) => Str(s1 + s2)
+ case Fluid(Str(s1) :: prev) => Fluid(Str(s1 + s2) :: prev)
+ case Fluid(relems) => Fluid(that :: relems)
+ }
+ case Fluid(relems) =>
+ (this /: relems.reverse)(_ appendToLastLine _)
+ }
+
+ private def appendIndented(that: Text)(width: Int): Text =
+ Vertical(that.layout(width - indentMargin).indented :: this.relems)
+
+ private def append(width: Int)(that: Text): Text = {
+ if (this.isEmpty) that.layout(width)
+ else if (that.isEmpty) this
+ else if (that.isVertical) appendIndented(that)(width)
+ else if (this.isVertical) Fluid(that.layout(width) :: this.relems)
+ else if (that.remaining(width - lastLine.length) >= 0) appendToLastLine(that)
+ else if (that.isSplittable) (this /: that.relems.reverse)(_.append(width)(_))
+ else appendIndented(that)(width)
+ }
+
+ def layout(width: Int): Text = this match {
+ case Str(_) =>
+ this
+ case Fluid(relems) =>
+ ((Str(""): Text) /: relems.reverse)(_.append(width)(_))
+ case Vertical(relems) =>
+ Vertical(relems map (_ layout width))
+ }
+
+ def map(f: String => String): Text = this match {
+ case Str(s) => Str(f(s))
+ case Fluid(relems) => Fluid(relems map (_ map f))
+ case Vertical(relems) => Vertical(relems map (_ map f))
+ }
+
+ def stripPrefix(pre: String): Text = this match {
+ case Str(s) =>
+ if (s.startsWith(pre)) s drop pre.length else s
+ case Fluid(relems) =>
+ val elems = relems.reverse
+ val head = elems.head.stripPrefix(pre)
+ if (head eq elems.head) this else Fluid((head :: elems.tail).reverse)
+ case Vertical(relems) =>
+ val elems = relems.reverse
+ val head = elems.head.stripPrefix(pre)
+ if (head eq elems.head) this else Vertical((head :: elems.tail).reverse)
+ }
+
+ private def indented: Text = this match {
+ case Str(s) => Str((" " * indentMargin) + s)
+ case Fluid(relems) => Fluid(relems map (_.indented))
+ case Vertical(relems) => Vertical(relems map (_.indented))
+ }
+
+ def print(sb: StringBuilder): Unit = this match {
+ case Str(s) =>
+ sb.append(s)
+ case _ =>
+ var follow = false
+ for (elem <- relems.reverse) {
+ if (follow) sb.append("\n")
+ elem.print(sb)
+ follow = true
+ }
+ }
+
+ def mkString(width: Int): String = {
+ val sb = new StringBuilder
+ layout(width).print(sb)
+ sb.toString
+ }
+
+ def ~ (that: Text) =
+ if (this.isEmpty) that
+ else if (that.isEmpty) this
+ else Fluid(that :: this :: Nil)
+
+ def ~~ (that: Text) =
+ if (this.isEmpty) that
+ else if (that.isEmpty) this
+ else Fluid(that :: Str(" ") :: this :: Nil)
+
+ def over (that: Text) =
+ if (this.isVertical) Vertical(that :: this.relems)
+ else Vertical(that :: this :: Nil)
+
+ def provided(pred: Boolean) = if (pred) this else Str("")
+ }
+
+ object Text {
+
+ /** The empty text */
+ def apply(): Text = Str("")
+
+ /** A concatenation of elements in `xs` and interspersed with
+ * separator strings `sep`.
+ */
+ def apply(xs: Traversable[Text], sep: String = " "): Text = {
+ if (sep == "\n") lines(xs)
+ else {
+ val ys = xs filterNot (_.isEmpty)
+ if (ys.isEmpty) Str("")
+ else ys reduce (_ ~ sep ~ _)
+ }
+ }
+
+ /** The given texts `xs`, each on a separate line */
+ def lines(xs: Traversable[Text]) = Vertical(xs.toList.reverse)
+ }
+
+ case class Str(s: String) extends Text {
+ override def relems: List[Text] = List(this)
+ }
+
+ case class Vertical(relems: List[Text]) extends Text
+ case class Fluid(relems: List[Text]) extends Text
+
+ class Closed(relems: List[Text]) extends Fluid(relems)
+
+ implicit def stringToText(s: String): Text = Str(s)
+}
diff --git a/compiler/src/dotty/tools/dotc/printing/package.scala b/compiler/src/dotty/tools/dotc/printing/package.scala
new file mode 100644
index 000000000..814eb2ad0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/printing/package.scala
@@ -0,0 +1,17 @@
+package dotty.tools.dotc
+
+import core.StdNames.nme
+import parsing.{precedence, minPrec, maxPrec, minInfixPrec}
+
+package object printing {
+
+ type Precedence = Int
+
+ val DotPrec = parsing.maxPrec
+ val AndPrec = parsing.precedence(nme.raw.AMP)
+ val OrPrec = parsing.precedence(nme.raw.BAR)
+ val InfixPrec = parsing.minInfixPrec
+ val GlobalPrec = parsing.minPrec
+ val TopLevelPrec = parsing.minPrec - 1
+
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/AbstractFileClassLoader.scala b/compiler/src/dotty/tools/dotc/repl/AbstractFileClassLoader.scala
new file mode 100644
index 000000000..a3a463717
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/AbstractFileClassLoader.scala
@@ -0,0 +1,31 @@
+package dotty.tools
+package dotc
+package repl
+
+import io.AbstractFile
+
+/**
+ * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
+ *
+ * @author Lex Spoon
+ */
+class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
+extends ClassLoader(parent)
+{
+ override def findClass(name: String): Class[_] = {
+ var file: AbstractFile = root
+ val pathParts = name.split("[./]").toList
+ for (dirPart <- pathParts.init) {
+ file = file.lookupName(dirPart, true)
+ if (file == null) {
+ throw new ClassNotFoundException(name)
+ }
+ }
+ file = file.lookupName(pathParts.last+".class", false)
+ if (file == null) {
+ throw new ClassNotFoundException(name)
+ }
+ val bytes = file.toByteArray
+ defineClass(name, bytes, 0, bytes.length)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/AmmoniteReader.scala b/compiler/src/dotty/tools/dotc/repl/AmmoniteReader.scala
new file mode 100644
index 000000000..f3b68e4b0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/AmmoniteReader.scala
@@ -0,0 +1,82 @@
+package dotty.tools
+package dotc
+package repl
+
+import core.Contexts._
+import ammonite.terminal._
+import LazyList._
+import Ansi.Color
+import filters._
+import BasicFilters._
+import GUILikeFilters._
+import util.SourceFile
+import printing.SyntaxHighlighting
+
+class AmmoniteReader(val interpreter: Interpreter)(implicit ctx: Context) extends InteractiveReader {
+ val interactive = true
+
+ def incompleteInput(str: String): Boolean =
+ interpreter.delayOutputDuring(interpreter.interpret(str)) match {
+ case Interpreter.Incomplete => true
+ case _ => false
+ }
+
+ val reader = new java.io.InputStreamReader(System.in)
+ val writer = new java.io.OutputStreamWriter(System.out)
+ val cutPasteFilter = ReadlineFilters.CutPasteFilter()
+ var history = List.empty[String]
+ val selectionFilter = GUILikeFilters.SelectionFilter(indent = 2)
+ val multilineFilter: Filter = Filter("multilineFilter") {
+ case TermState(lb ~: rest, b, c, _)
+ if (lb == 10 || lb == 13) && incompleteInput(b.mkString) =>
+ BasicFilters.injectNewLine(b, c, rest, indent = 2)
+ }
+
+ def readLine(prompt: String): String = {
+ val historyFilter = new HistoryFilter(
+ () => history.toVector,
+ Console.BLUE,
+ AnsiNav.resetForegroundColor
+ )
+
+ val allFilters = Filter.merge(
+ UndoFilter(),
+ historyFilter,
+ selectionFilter,
+ GUILikeFilters.altFilter,
+ GUILikeFilters.fnFilter,
+ ReadlineFilters.navFilter,
+ cutPasteFilter,
+ multilineFilter,
+ BasicFilters.all
+ )
+
+ Terminal.readLine(
+ Console.BLUE + prompt + Console.RESET,
+ reader,
+ writer,
+ allFilters,
+ displayTransform = (buffer, cursor) => {
+ val coloredBuffer =
+ if (ctx.useColors) SyntaxHighlighting(buffer)
+ else buffer
+
+ val ansiBuffer = Ansi.Str.parse(coloredBuffer.toVector)
+ val (newBuffer, cursorOffset) = SelectionFilter.mangleBuffer(
+ selectionFilter, ansiBuffer, cursor, Ansi.Reversed.On
+ )
+ val newNewBuffer = HistoryFilter.mangleBuffer(
+ historyFilter, newBuffer, cursor,
+ Ansi.Color.Green
+ )
+
+ (newNewBuffer, cursorOffset)
+ }
+ ) match {
+ case Some(res) =>
+ history = res :: history;
+ res
+ case None => ":q"
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
new file mode 100644
index 000000000..5b3669d5e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
@@ -0,0 +1,966 @@
+package dotty.tools
+package dotc
+package repl
+
+import java.io.{
+ File, PrintWriter, PrintStream, StringWriter, Writer, OutputStream,
+ ByteArrayOutputStream => ByteOutputStream
+}
+import java.lang.{Class, ClassLoader}
+import java.net.{URL, URLClassLoader}
+
+import scala.collection.immutable.ListSet
+import scala.collection.mutable
+import scala.collection.mutable.{ListBuffer, HashSet, ArrayBuffer}
+
+//import ast.parser.SyntaxAnalyzer
+import io.{PlainFile, VirtualDirectory}
+import scala.reflect.io.{PlainDirectory, Directory}
+import reporting.{ConsoleReporter, Reporter}
+import core.Flags
+import util.{SourceFile, NameTransformer}
+import io.ClassPath
+import ast.Trees._
+import parsing.Parsers._
+import core._
+import dotty.tools.backend.jvm.GenBCode
+import Symbols._, Types._, Contexts._, StdNames._, Names._, NameOps._
+import Decorators._
+import scala.util.control.NonFatal
+import printing.SyntaxHighlighting
+
+/** An interpreter for Scala code which is based on the `dotc` compiler.
+ *
+ * The overall approach is based on compiling the requested code and then
+ * using a Java classloader and Java reflection to run the code
+ * and access its results.
+ *
+ * In more detail, a single compiler instance is used
+ * to accumulate all successfully compiled or interpreted Scala code. To
+ * "interpret" a line of code, the compiler generates a fresh object that
+ * includes the line of code and which has public definition(s) to export
+ * all variables defined by that code. To extract the result of an
+ * interpreted line to show the user, a second "result object" is created
+ * which imports the variables exported by the above object and then
+ * exports a single definition named "result". To accommodate user expressions
+ * that read from variables or methods defined in previous statements, "import"
+ * statements are used.
+ *
+ * This interpreter shares the strengths and weaknesses of using the
+ * full compiler-to-Java. The main strength is that interpreted code
+ * behaves exactly as does compiled code, including running at full speed.
+ * The main weakness is that redefining classes and methods is not handled
+ * properly, because rebinding at the Java level is technically difficult.
+ *
+ * @author Moez A. Abdel-Gawad
+ * @author Lex Spoon
+ * @author Martin Odersky
+ *
+ * @param out The output to use for diagnostics
+ * @param ictx The context to use for initialization of the interpreter,
+ * needed to access the current classpath.
+ */
+class CompilingInterpreter(
+ out: PrintWriter,
+ ictx: Context,
+ parentClassLoader: Option[ClassLoader]
+) extends Compiler with Interpreter {
+ import ast.untpd._
+ import CompilingInterpreter._
+
+ ictx.base.initialize()(ictx)
+
+ /** directory to save .class files to */
+ val virtualDirectory =
+ if (ictx.settings.d.isDefault(ictx)) new VirtualDirectory("(memory)", None)
+ else new PlainDirectory(new Directory(new java.io.File(ictx.settings.d.value(ictx)))) // for now, to help debugging
+
+ /** A GenBCode phase that uses `virtualDirectory` for its output */
+ private class REPLGenBCode extends GenBCode {
+ override def outputDir(implicit ctx: Context) = virtualDirectory
+ }
+
+ /** Phases of this compiler use `REPLGenBCode` instead of `GenBCode`. */
+ override def phases = Phases.replace(
+ classOf[GenBCode], _ => new REPLGenBCode :: Nil, super.phases)
+
+ /** whether to print out result lines */
+ private var printResults: Boolean = true
+ private var delayOutput: Boolean = false
+
+ val previousOutput = ListBuffer.empty[String]
+
+ override def lastOutput() = {
+ val prev = previousOutput.toList
+ previousOutput.clear()
+ prev
+ }
+
+ override def delayOutputDuring[T](operation: => T): T = {
+ val old = delayOutput
+ try {
+ delayOutput = true
+ operation
+ } finally {
+ delayOutput = old
+ }
+ }
+
+ /** Temporarily be quiet */
+ override def beQuietDuring[T](operation: => T): T = {
+ val wasPrinting = printResults
+ try {
+ printResults = false
+ operation
+ } finally {
+ printResults = wasPrinting
+ }
+ }
+
+ private def newReporter =
+ new ConsoleReporter(Console.in, out) {
+ override def printMessage(msg: String) =
+ if (!delayOutput) {
+ out.print(/*clean*/(msg) + "\n")
+ // Suppress clean for now for compiler messages
+ // Otherwise we will completely delete all references to
+ // line$object$ module classes. The previous interpreter did not
+ // have the project because the module class was written without the final `$'
+ // and therefore escaped the purge. We can turn this back on once
+ // we drop the final `$' from module classes.
+ out.flush()
+ } else {
+ previousOutput += (/*clean*/(msg) + "\n")
+ }
+ }
+
+ /** the previous requests this interpreter has processed */
+ private val prevRequests = new ArrayBuffer[Request]()
+
+ /** the compiler's classpath, as URL's */
+ val compilerClasspath: List[URL] = ictx.platform.classPath(ictx).asURLs
+
+ /* A single class loader is used for all commands interpreted by this Interpreter.
+ It would also be possible to create a new class loader for each command
+ to interpret. The advantages of the current approach are:
+
+ - Expressions are only evaluated one time. This is especially
+ significant for I/O, e.g. "val x = Console.readLine"
+
+ The main disadvantage is:
+
+ - Objects, classes, and methods cannot be rebound. Instead, definitions
+ shadow the old ones, and old code objects refer to the old
+ definitions.
+ */
+ /** class loader used to load compiled code */
+ val classLoader: ClassLoader = {
+ lazy val parent = new URLClassLoader(compilerClasspath.toArray,
+ classOf[Interpreter].getClassLoader)
+
+ new AbstractFileClassLoader(virtualDirectory, parentClassLoader.getOrElse(parent))
+ }
+
+ // Set the current Java "context" class loader to this interpreter's class loader
+ Thread.currentThread.setContextClassLoader(classLoader)
+
+ /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
+ private def parse(line: String)(implicit ctx: Context): Option[List[Tree]] = {
+ var justNeedsMore = false
+ val reporter = newReporter
+ reporter.withIncompleteHandler { _ => _ => justNeedsMore = true } {
+ // simple parse: just parse it, nothing else
+ def simpleParse(code: String)(implicit ctx: Context): List[Tree] = {
+ val source = new SourceFile("<console>", code.toCharArray())
+ val parser = new Parser(source)
+ val (selfDef, stats) = parser.templateStatSeq
+ stats
+ }
+ val trees = simpleParse(line)(ctx.fresh.setReporter(reporter))
+ if (reporter.hasErrors) {
+ Some(Nil) // the result did not parse, so stop
+ } else if (justNeedsMore) {
+ None
+ } else {
+ Some(trees)
+ }
+ }
+ }
+
+ /** Compile a SourceFile. Returns the root context of the run that compiled the file.
+ */
+ def compileSources(sources: List[SourceFile])(implicit ctx: Context): Context = {
+ val reporter = newReporter
+ val run = newRun(ctx.fresh.setReporter(reporter))
+ run.compileSources(sources)
+ run.runContext
+ }
+
+ /** Compile a string. Returns true if there are no
+ * compilation errors, or false otherwise.
+ */
+ def compileString(code: String)(implicit ctx: Context): Boolean = {
+ val runCtx = compileSources(List(new SourceFile("<script>", code.toCharArray)))
+ !runCtx.reporter.hasErrors
+ }
+
+ override def interpret(line: String)(implicit ctx: Context): Interpreter.Result = {
+ // if (prevRequests.isEmpty)
+ // new Run(this) // initialize the compiler // (not sure this is needed)
+ // parse
+ parse(line) match {
+ case None => Interpreter.Incomplete
+ case Some(Nil) => Interpreter.Error // parse error or empty input
+ case Some(tree :: Nil) if tree.isTerm && !tree.isInstanceOf[Assign] =>
+ previousOutput.clear() // clear previous error reporting
+ interpret(s"val $newVarName =\n$line")
+ case Some(trees) =>
+ previousOutput.clear() // clear previous error reporting
+ val req = new Request(line, newLineName)
+ if (!req.compile())
+ Interpreter.Error // an error happened during compilation, e.g. a type error
+ else {
+ val (resultStrings, succeeded) = req.loadAndRun()
+ if (delayOutput)
+ previousOutput ++= resultStrings.map(clean)
+ else if (printResults || !succeeded)
+ resultStrings.foreach(x => out.print(clean(x)))
+ if (succeeded) {
+ prevRequests += req
+ Interpreter.Success
+ }
+ else Interpreter.Error
+ }
+ }
+ }
+
+ private def loadAndSetValue(objectName: String, value: AnyRef) = {
+ /** This terrible string is the wrapped class's full name inside the
+ * classloader:
+ * lineX$object$$iw$$iw$list$object
+ */
+ val objName: String = List(
+ currentLineName + INTERPRETER_WRAPPER_SUFFIX,
+ INTERPRETER_IMPORT_WRAPPER,
+ INTERPRETER_IMPORT_WRAPPER,
+ objectName
+ ).mkString("$")
+
+ try {
+ val resObj: Class[_] = Class.forName(objName, true, classLoader)
+ val setMethod = resObj.getDeclaredMethods.find(_.getName == "set")
+
+ setMethod.fold(false) { method =>
+ method.invoke(resObj, value) == null
+ }
+ } catch {
+ case NonFatal(_) =>
+ // Unable to set value on object due to exception during reflection
+ false
+ }
+ }
+
+ /** This bind is implemented by creating an object with a set method and a
+ * field `value`. The value is then set via Java reflection.
+ *
+ * Example: We want to bind a value `List(1,2,3)` to identifier `list` from
+ * sbt. The bind method accomplishes this by creating the following:
+ * {{{
+ * object ContainerObjectWithUniqueID {
+ * var value: List[Int] = _
+ * def set(x: Any) = value = x.asInstanceOf[List[Int]]
+ * }
+ * val list = ContainerObjectWithUniqueID.value
+ * }}}
+ *
+ * Between the object being created and the value being assigned, the value
+ * inside the object is set via reflection.
+ */
+ override def bind(id: String, boundType: String, value: AnyRef)(implicit ctx: Context): Interpreter.Result =
+ interpret(
+ """
+ |object %s {
+ | var value: %s = _
+ | def set(x: Any) = value = x.asInstanceOf[%s]
+ |}
+ """.stripMargin.format(id + INTERPRETER_WRAPPER_SUFFIX, boundType, boundType)
+ ) match {
+ case Interpreter.Success if loadAndSetValue(id + INTERPRETER_WRAPPER_SUFFIX, value) =>
+ val line = "val %s = %s.value".format(id, id + INTERPRETER_WRAPPER_SUFFIX)
+ interpret(line)
+ case Interpreter.Error | Interpreter.Incomplete =>
+ out.println("Set failed in bind(%s, %s, %s)".format(id, boundType, value))
+ Interpreter.Error
+ }
+
+ /** Trait collecting info about one of the statements of an interpreter request */
+ private trait StatementInfo {
+ /** The statement */
+ def statement: Tree
+
+ /** The names defined previously and referred to in the statement */
+ def usedNames: List[Name]
+
+ /** The names defined in the statement */
+ val boundNames: List[Name]
+
+ /** Statement is an import that contains a wildcard */
+ val importsWildcard: Boolean
+
+ /** The names imported by the statement (if it is an import clause) */
+ val importedNames: Seq[Name]
+
+    /** Statement defines an implicit value or method */
+ val definesImplicit: Boolean
+ }
+
+ /** One line of code submitted by the user for interpretation */
+ private class Request(val line: String, val lineName: String)(implicit ctx: Context) {
+ private val trees = {
+ val parsed = parse(line)
+ previousOutput.clear() // clear previous error reporting
+ parsed match {
+ case Some(ts) => ts
+ case None => Nil
+ }
+ }
+
+ /** name to use for the object that will compute "line" */
+ private def objectName = lineName + INTERPRETER_WRAPPER_SUFFIX
+
+ /** name of the object that retrieves the result from the above object */
+ private def resultObjectName = "RequestResult$" + objectName
+
+ private def chooseHandler(stat: Tree): StatementHandler = stat match {
+ case stat: DefDef => new DefHandler(stat)
+ case stat: ValDef => new ValHandler(stat)
+ case stat: PatDef => new PatHandler(stat)
+ case stat @ Assign(Ident(_), _) => new AssignHandler(stat)
+ case stat: ModuleDef => new ModuleHandler(stat)
+ case stat: TypeDef if stat.isClassDef => new ClassHandler(stat)
+ case stat: TypeDef => new TypeAliasHandler(stat)
+ case stat: Import => new ImportHandler(stat)
+// case DocDef(_, documented) => chooseHandler(documented)
+ case stat => new GenericHandler(stat)
+ }
+
+ private val handlers: List[StatementHandler] = trees.map(chooseHandler)
+
+ /** all (public) names defined by these statements */
+ private val boundNames = ListSet(handlers.flatMap(_.boundNames): _*).toList
+
+ /** list of names used by this expression */
+ private val usedNames: List[Name] = handlers.flatMap(_.usedNames)
+
+ private val (importsPreamble, importsTrailer, accessPath) =
+ importsCode(usedNames.toSet)
+
+ /** Code to access a variable with the specified name */
+ private def fullPath(vname: String): String = s"$objectName$accessPath.`$vname`"
+
+ /** Code to access a variable with the specified name */
+ private def fullPath(vname: Name): String = fullPath(vname.toString)
+
+ /** the line of code to compute */
+ private def toCompute = line
+
+ /** generate the source code for the object that computes this request
+ * TODO Reformulate in a functional way
+ */
+ private def objectSourceCode: String =
+ stringFrom { code =>
+ // header for the wrapper object
+ code.println(s"object $objectName{")
+ code.print(importsPreamble)
+ code.println(toCompute)
+ handlers.foreach(_.extraCodeToEvaluate(this,code))
+ code.println(importsTrailer)
+ //end the wrapper object
+ code.println(";}")
+ }
+
+ /** Types of variables defined by this request. They are computed
+ after compilation of the main object */
+ private var typeOf: Map[Name, String] = _
+
+ /** generate source code for the object that retrieves the result
+ from objectSourceCode */
+ private def resultObjectSourceCode: String =
+ stringFrom(code => {
+ code.println(s"object $resultObjectName")
+ code.println("{ val result: String = {")
+ code.println(s"$objectName$accessPath;") // evaluate the object, to make sure its constructor is run
+ code.print("(\"\"") // print an initial empty string, so later code can
+ // uniformly be: + morestuff
+ handlers.foreach(_.resultExtractionCode(this, code))
+ code.println("\n)}")
+ code.println(";}")
+ })
+
+
+ /** Compile the object file. Returns whether the compilation succeeded.
+ * If all goes well, the "types" map is computed. */
+ def compile(): Boolean = {
+ val compileCtx = compileSources(
+ List(new SourceFile("<console>", objectSourceCode.toCharArray)))
+ !compileCtx.reporter.hasErrors && {
+ this.typeOf = findTypes(compileCtx)
+ val resultCtx = compileSources(
+ List(new SourceFile("<console>", resultObjectSourceCode.toCharArray)))
+ !resultCtx.reporter.hasErrors
+ }
+ }
+
+ /** Dig the types of all bound variables out of the compiler run.
+ * TODO: Change the interface so that we typecheck, and then transform
+ * directly. Treating the compiler as less of a blackbox will require
+ * much less magic here.
+ */
+ private def findTypes(implicit ctx: Context): Map[Name, String] = {
+ def valAndVarNames = handlers.flatMap(_.valAndVarNames)
+ def defNames = handlers.flatMap(_.defNames)
+
+ def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
+ /** the outermost wrapper object */
+ val outerResObjSym: Symbol =
+ defn.EmptyPackageClass.info.decl(objectName.toTermName).symbol
+
+ /** the innermost object inside the wrapper, found by
+ * following accessPath into the outer one. */
+ val resObjSym =
+ (accessPath.split("\\.")).foldLeft(outerResObjSym) { (sym,str) =>
+ if (str.isEmpty) sym
+ else
+ ctx.atPhase(ctx.typerPhase.next) { implicit ctx =>
+ sym.info.member(str.toTermName).symbol
+ }
+ }
+
+ names.foldLeft(Map.empty[Name,String]) { (map, name) =>
+ val rawType =
+ ctx.atPhase(ctx.typerPhase.next) { implicit ctx =>
+ resObjSym.info.member(name).info
+ }
+
+ // the types are all =>T; remove the =>
+ val cleanedType = rawType.widenExpr
+
+ map + (name ->
+ ctx.atPhase(ctx.typerPhase.next) { implicit ctx =>
+ cleanedType.show
+ })
+ }
+ }
+
+ val names1 = getTypes(valAndVarNames, n => n.toTermName.fieldName)
+ val names2 = getTypes(defNames, identity)
+ names1 ++ names2
+ }
+
+ /** Sets both System.{out,err} and Console.{out,err} to supplied
+ * `os: OutputStream`
+ */
+ private def withOutput[T](os: ByteOutputStream)(op: ByteOutputStream => T) = {
+ val ps = new PrintStream(os)
+ val oldOut = System.out
+ val oldErr = System.err
+ System.setOut(ps)
+ System.setErr(ps)
+
+ try {
+ Console.withOut(os)(Console.withErr(os)(op(os)))
+ } finally {
+ System.setOut(oldOut)
+ System.setErr(oldErr)
+ }
+ }
+
+ /** load and run the code using reflection.
+ * @return A pair consisting of the run's result as a `List[String]`, and
+ * a boolean indicating whether the run succeeded without throwing
+ * an exception.
+ */
+ def loadAndRun(): (List[String], Boolean) = {
+ val interpreterResultObject: Class[_] =
+ Class.forName(resultObjectName, true, classLoader)
+ val valMethodRes: java.lang.reflect.Method =
+ interpreterResultObject.getMethod("result")
+ try {
+ withOutput(new ByteOutputStream) { ps =>
+ val rawRes = valMethodRes.invoke(interpreterResultObject).toString
+ val res =
+ if (ictx.useColors) new String(SyntaxHighlighting(rawRes).toArray)
+ else rawRes
+ val prints = ps.toString("utf-8")
+ val printList = if (prints != "") prints :: Nil else Nil
+
+ if (!delayOutput) out.print(prints)
+
+ (printList :+ res, true)
+ }
+ } catch {
+ case NonFatal(ex) =>
+ def cause(ex: Throwable): Throwable =
+ if (ex.getCause eq null) ex else cause(ex.getCause)
+ val orig = cause(ex)
+ (stringFrom(str => orig.printStackTrace(str)) :: Nil, false)
+ }
+ }
+
+ /** Compute imports that allow definitions from previous
+ * requests to be visible in a new request. Returns
+ * three pieces of related code as strings:
+ *
+ * 1. A _preamble_: An initial code fragment that should go before
+ * the code of the new request.
+ *
+ * 2. A _trailer_: A code fragment that should go after the code
+ * of the new request.
+ *
+ * 3. An _access path_ which can be traversed to access
+ * any bindings inside code wrapped by #1 and #2 .
+ *
+ * The argument is a set of Names that need to be imported.
+ *
+ * Limitations: This method is not as precise as it could be.
+ * (1) It does not process wildcard imports to see what exactly
+ * they import.
+ * (2) If it imports any names from a request, it imports all
+ * of them, which is not really necessary.
+ * (3) It imports multiple same-named implicits, but only the
+ * last one imported is actually usable.
+ */
+ private def importsCode(wanted: Set[Name]): (String, String, String) = {
+ /** Narrow down the list of requests from which imports
+ * should be taken. Removes requests which cannot contribute
+ * useful imports for the specified set of wanted names.
+ */
+ def reqsToUse: List[(Request, StatementInfo)] = {
+ /** Loop through a list of StatementHandlers and select
+ * which ones to keep. 'wanted' is the set of
+ * names that need to be imported.
+ */
+ def select(reqs: List[(Request, StatementInfo)], wanted: Set[Name]): List[(Request, StatementInfo)] = {
+ reqs match {
+ case Nil => Nil
+
+ case (req, handler) :: rest =>
+ val keepit =
+ (handler.definesImplicit ||
+ handler.importsWildcard ||
+ handler.importedNames.exists(wanted.contains(_)) ||
+ handler.boundNames.exists(wanted.contains(_)))
+
+ val newWanted =
+ if (keepit) {
+ (wanted
+ ++ handler.usedNames
+ -- handler.boundNames
+ -- handler.importedNames)
+ } else {
+ wanted
+ }
+
+ val restToKeep = select(rest, newWanted)
+
+ if (keepit)
+ (req, handler) :: restToKeep
+ else
+ restToKeep
+ }
+ }
+
+ val rhpairs = for {
+ req <- prevRequests.toList.reverse
+ handler <- req.handlers
+ } yield (req, handler)
+
+ select(rhpairs, wanted).reverse
+ }
+
+ val preamble = new StringBuffer
+ val trailingBraces = new StringBuffer
+ val accessPath = new StringBuffer
+ val impname = INTERPRETER_IMPORT_WRAPPER
+ val currentImps = mutable.Set[Name]()
+
+ // add code for a new object to hold some imports
+ def addWrapper(): Unit = {
+ preamble.append("object " + impname + "{\n")
+ trailingBraces.append("}\n")
+ accessPath.append("." + impname)
+ currentImps.clear
+ }
+
+ addWrapper()
+
+ // loop through previous requests, adding imports
+ // for each one
+ for ((req, handler) <- reqsToUse) {
+ // If the user entered an import, then just use it
+
+ // add an import wrapping level if the import might
+ // conflict with some other import
+ if (handler.importsWildcard ||
+ currentImps.exists(handler.importedNames.contains))
+ if (!currentImps.isEmpty)
+ addWrapper()
+
+ if (handler.statement.isInstanceOf[Import])
+ preamble.append(handler.statement.show + ";\n")
+
+      // give wildcard imports an import wrapper all to their own
+ if (handler.importsWildcard)
+ addWrapper()
+ else
+ currentImps ++= handler.importedNames
+
+ // For other requests, import each bound variable.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ for (imv <- handler.boundNames) {
+ if (currentImps.contains(imv))
+ addWrapper()
+ preamble.append("import ")
+ preamble.append(req.objectName + req.accessPath + ".`" + imv + "`;\n")
+ currentImps += imv
+ }
+ }
+
+ addWrapper() // Add one extra wrapper, to prevent warnings
+ // in the frequent case of redefining
+ // the value bound in the last interpreter
+ // request.
+
+ (preamble.toString, trailingBraces.toString, accessPath.toString)
+ }
+
+ // ------ Handlers ------------------------------------------
+
+ /** Class to handle one statement among all the statements included
+ * in a single interpreter request.
+ */
+ private sealed abstract class StatementHandler(val statement: Tree) extends StatementInfo {
+ val usedNames: List[Name] = {
+ val ivt = new UntypedTreeAccumulator[mutable.Set[Name]] {
+ override def apply(ns: mutable.Set[Name], tree: Tree)(implicit ctx: Context) =
+ tree match {
+ case Ident(name) => ns += name
+ case _ => foldOver(ns, tree)
+ }
+ }
+ ivt.foldOver(HashSet(), statement).toList
+ }
+ val boundNames: List[Name] = Nil
+ def valAndVarNames: List[Name] = Nil
+ def defNames: List[Name] = Nil
+ val importsWildcard = false
+ val importedNames: Seq[Name] = Nil
+ val definesImplicit = statement match {
+ case tree: MemberDef => tree.mods.is(Flags.Implicit)
+ case _ => false
+ }
+
+ def extraCodeToEvaluate(req: Request, code: PrintWriter) = {}
+ def resultExtractionCode(req: Request, code: PrintWriter) = {}
+ }
+
+ private class GenericHandler(statement: Tree) extends StatementHandler(statement)
+
+ private abstract class ValOrPatHandler(statement: Tree)
+ extends StatementHandler(statement) {
+ override val boundNames: List[Name] = _boundNames
+ override def valAndVarNames = boundNames
+
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ if (!shouldShowResult(req)) return
+ val resultExtractors = boundNames.map(name => resultExtractor(req, name))
+ code.print(resultExtractors.mkString(""))
+ }
+
+ private def resultExtractor(req: Request, varName: Name): String = {
+ val prettyName = varName.decode
+ val varType = string2code(req.typeOf(varName))
+ val fullPath = req.fullPath(varName)
+
+ s""" + "$prettyName: $varType = " + {
+ | if ($fullPath.asInstanceOf[AnyRef] != null) {
+ | (if ($fullPath.toString().contains('\\n')) "\\n" else "") +
+ | $fullPath.toString() + "\\n"
+ | } else {
+ | "null\\n"
+ | }
+ |}""".stripMargin
+ }
+
+ protected def _boundNames: List[Name]
+ protected def shouldShowResult(req: Request): Boolean
+ }
+
+ private class ValHandler(statement: ValDef) extends ValOrPatHandler(statement) {
+ override def _boundNames = List(statement.name)
+
+ override def shouldShowResult(req: Request): Boolean =
+ !statement.mods.is(Flags.AccessFlags) &&
+ !(isGeneratedVarName(statement.name.toString) &&
+ req.typeOf(statement.name.encode) == "Unit")
+ }
+
+
+ private class PatHandler(statement: PatDef) extends ValOrPatHandler(statement) {
+ override def _boundNames = statement.pats.flatMap(findVariableNames)
+
+ override def shouldShowResult(req: Request): Boolean =
+ !statement.mods.is(Flags.AccessFlags)
+
+ private def findVariableNames(tree: Tree): List[Name] = tree match {
+ case Ident(name) if name.toString != "_" => List(name)
+ case _ => VariableNameFinder(Nil, tree).reverse
+ }
+
+ private object VariableNameFinder extends UntypedDeepFolder[List[Name]](
+ (acc: List[Name], t: Tree) => t match {
+ case _: BackquotedIdent => acc
+ case Ident(name) if name.isVariableName && name.toString != "_" => name :: acc
+ case Bind(name, _) if name.isVariableName => name :: acc
+ case _ => acc
+ }
+ )
+ }
+
+ private class DefHandler(defDef: DefDef) extends StatementHandler(defDef) {
+ override val boundNames = List(defDef.name)
+ override def defNames = boundNames
+
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ if (!defDef.mods.is(Flags.AccessFlags))
+ code.print("+\"" + string2code(defDef.name.toString) + ": " +
+ string2code(req.typeOf(defDef.name)) + "\\n\"")
+ }
+ }
+
+ private class AssignHandler(statement: Assign) extends StatementHandler(statement) {
+ val lhs = statement.lhs.asInstanceOf[Ident] // an unfortunate limitation
+
+ val helperName = newInternalVarName().toTermName
+ override val valAndVarNames = List(helperName)
+
+ override def extraCodeToEvaluate(req: Request, code: PrintWriter): Unit = {
+ code.println(i"val $helperName = ${statement.lhs};")
+ }
+
+ /** Print out lhs instead of the generated varName */
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ code.print(" + \"" + lhs.show + ": " +
+ string2code(req.typeOf(helperName.encode)) +
+ " = \" + " +
+ string2code(req.fullPath(helperName))
+ + " + \"\\n\"")
+ }
+ }
+
+ private class ModuleHandler(module: ModuleDef) extends StatementHandler(module) {
+ override val boundNames = List(module.name)
+
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ code.println(" + \"defined module " +
+ string2code(module.name.toString)
+ + "\\n\"")
+ }
+ }
+
+ private class ClassHandler(classdef: TypeDef)
+ extends StatementHandler(classdef) {
+ override val boundNames =
+ List(classdef.name) :::
+ (if (classdef.mods.is(Flags.Case))
+ List(classdef.name.toTermName)
+ else
+ Nil)
+
+ // TODO: MemberDef.keyword does not include "trait";
+ // otherwise it could be used here
+ def keyword: String =
+ if (classdef.mods.is(Flags.Trait)) "trait" else "class"
+
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ code.print(
+ " + \"defined " +
+ keyword +
+ " " +
+ string2code(classdef.name.toString) +
+ "\\n\"")
+ }
+ }
+
+ private class TypeAliasHandler(typeDef: TypeDef)
+ extends StatementHandler(typeDef) {
+ override val boundNames =
+ if (!typeDef.mods.is(Flags.AccessFlags) && !typeDef.rhs.isInstanceOf[TypeBoundsTree])
+ List(typeDef.name)
+ else
+ Nil
+
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ code.println(" + \"defined type alias " +
+ string2code(typeDef.name.toString) + "\\n\"")
+ }
+ }
+
+ private class ImportHandler(imp: Import) extends StatementHandler(imp) {
+ override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
+ code.println("+ \"" + imp.show + "\\n\"")
+ }
+
+ def isWildcardSelector(tree: Tree) = tree match {
+ case Ident(nme.USCOREkw) => true
+ case _ => false
+ }
+
+ /** Whether this import includes a wildcard import */
+ override val importsWildcard = imp.selectors.exists(isWildcardSelector)
+
+ /** The individual names imported by this statement */
+ override val importedNames: Seq[Name] =
+ imp.selectors.filterNot(isWildcardSelector).flatMap {
+ case sel: RefTree => List(sel.name.toTypeName, sel.name.toTermName)
+ case _ => Nil
+ }
+ }
+
+ } // end Request
+
+ // ------- String handling ----------------------------------
+
+ /** next line number to use */
+ private var nextLineNo = 0
+
+ /** allocate a fresh line name */
+ private def newLineName = {
+ val num = nextLineNo
+ nextLineNo += 1
+ INTERPRETER_LINE_PREFIX + num
+ }
+
+ private def currentLineName =
+ INTERPRETER_LINE_PREFIX + (nextLineNo - 1)
+
+ /** next result variable number to use */
+ private var nextVarNameNo = 0
+
+ /** allocate a fresh variable name */
+ private def newVarName = {
+ val num = nextVarNameNo
+ nextVarNameNo += 1
+ INTERPRETER_VAR_PREFIX + num
+ }
+
+ /** next internal variable number to use */
+ private var nextInternalVarNo = 0
+
+ /** allocate a fresh internal variable name */
+ private def newInternalVarName() = {
+ val num = nextVarNameNo
+ nextVarNameNo += 1
+ INTERPRETER_SYNTHVAR_PREFIX + num
+ }
+
+ /** Check if a name looks like it was generated by newVarName */
+ private def isGeneratedVarName(name: String): Boolean =
+ name.startsWith(INTERPRETER_VAR_PREFIX) && {
+ val suffix = name.drop(INTERPRETER_VAR_PREFIX.length)
+ suffix.forall(_.isDigit)
+ }
+
+ /** generate a string using a routine that wants to write on a stream */
+ private def stringFrom(writer: PrintWriter => Unit): String = {
+ val stringWriter = new StringWriter()
+ val stream = new NewLinePrintWriter(stringWriter)
+ writer(stream)
+ stream.close()
+ stringWriter.toString
+ }
+
+ /** Truncate a string if it is longer than settings.maxPrintString */
+ private def truncPrintString(str: String)(implicit ctx: Context): String = {
+ val maxpr = ctx.settings.XreplLineWidth.value
+
+ if (maxpr <= 0)
+ return str
+
+ if (str.length <= maxpr)
+ return str
+
+ val trailer = "..."
+ if (maxpr >= trailer.length-1)
+ str.substring(0, maxpr-3) + trailer + "\n"
+ else
+ str.substring(0, maxpr-1)
+ }
+
+ /** Clean up a string for output */
+ private def clean(str: String)(implicit ctx: Context) =
+ truncPrintString(stripWrapperGunk(str))
+}
+
+/** Utility methods for the Interpreter. */
+object CompilingInterpreter {
+ val INTERPRETER_WRAPPER_SUFFIX = "$object"
+ val INTERPRETER_LINE_PREFIX = "line"
+ val INTERPRETER_VAR_PREFIX = "res"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val INTERPRETER_SYNTHVAR_PREFIX = "synthvar$"
+
+ /** Delete a directory tree recursively. Use with care!
+ */
+ private[repl] def deleteRecursively(path: File): Unit = {
+ path match {
+ case _ if !path.exists =>
+ ()
+ case _ if path.isDirectory =>
+ for (p <- path.listFiles)
+ deleteRecursively(p)
+ path.delete
+ case _ =>
+ path.delete
+ }
+ }
+
+ /** Heuristically strip interpreter wrapper prefixes
+ * from an interpreter output string.
+ */
+ def stripWrapperGunk(str: String): String = {
+ val wrapregex = "(line[0-9]+\\$object[$.])?(\\$iw[$.])*"
+ str.replaceAll(wrapregex, "")
+ }
+
+ /** Convert a string into code that can recreate the string.
+ * This requires replacing all special characters by escape
+ * codes. It does not add the surrounding " marks. */
+ def string2code(str: String): String = {
+ /** Convert a character to a backslash-u escape */
+ def char2uescape(c: Char): String = {
+ var rest = c.toInt
+ val buf = new StringBuilder
+ for (i <- 1 to 4) {
+ buf ++= (rest % 16).toHexString
+ rest = rest / 16
+ }
+ "\\" + "u" + buf.toString.reverse
+ }
+ val res = new StringBuilder
+ for (c <- str) {
+ if ("'\"\\" contains c) {
+ res += '\\'
+ res += c
+ } else if (!c.isControl) {
+ res += c
+ } else {
+ res ++= char2uescape(c)
+ }
+ }
+ res.toString
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ConsoleWriter.scala b/compiler/src/dotty/tools/dotc/repl/ConsoleWriter.scala
new file mode 100644
index 000000000..9387f366a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ConsoleWriter.scala
@@ -0,0 +1,21 @@
+package dotty.tools
+package dotc
+package repl
+import java.io.Writer
+
+/** A Writer that writes onto the Scala Console.
+ *
+ * @author Lex Spoon
+ * @version 1.0
+ */
+class ConsoleWriter extends Writer {
+ def close = flush
+
+ def flush = Console.flush
+
+ def write(cbuf: Array[Char], off: Int, len: Int): Unit =
+ if (len > 0)
+ write(new String(cbuf, off, len))
+
+ override def write(str: String): Unit = Console.print(str)
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/InteractiveReader.scala b/compiler/src/dotty/tools/dotc/repl/InteractiveReader.scala
new file mode 100644
index 000000000..07ce23717
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/InteractiveReader.scala
@@ -0,0 +1,20 @@
+package dotty.tools
+package dotc
+package repl
+
+import dotc.core.Contexts.Context
+
+/** Reads lines from an input stream */
+trait InteractiveReader {
+ def readLine(prompt: String): String
+ val interactive: Boolean
+}
+
+/** The current Scala REPL knows how to do this flexibly.
+ */
+object InteractiveReader {
+ /** Create an interactive reader */
+ def createDefault(in: Interpreter)(implicit ctx: Context): InteractiveReader = {
+ new AmmoniteReader(in)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/Interpreter.scala b/compiler/src/dotty/tools/dotc/repl/Interpreter.scala
new file mode 100644
index 000000000..edcc5b153
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/Interpreter.scala
@@ -0,0 +1,45 @@
+package dotty.tools
+package dotc
+package repl
+
+import core.Contexts.Context
+
+/** This object defines the type of interpreter results */
+object Interpreter {
+
+ /** A result from interpreting one line of input. */
+ abstract sealed class Result
+
+ /** The line was interpreted successfully. */
+ case object Success extends Result
+
+ /** The line was erroneous in some way. */
+ case object Error extends Result
+
+ /** The input was incomplete. The caller should request more input.
+ */
+ case object Incomplete extends Result
+}
+
+/** The exported functionality of the interpreter */
+trait Interpreter {
+ import Interpreter._
+
+ /** Interpret one line of input. All feedback, including parse errors and
+ * evaluation results, are printed via the context's reporter. Values
+ * defined are available for future interpreted strings.
+ */
+ def interpret(line: String)(implicit ctx: Context): Result
+
+ /** Tries to bind an id to a value, returns the outcome of trying to bind */
+ def bind(id: String, boundType: String, value: AnyRef)(implicit ctx: Context): Result
+
+ /** Suppress output during evaluation of `operation`. */
+ def beQuietDuring[T](operation: => T): T
+
+ /** Suppresses output and saves it for `lastOutput` to collect */
+ def delayOutputDuring[T](operation: => T): T
+
+ /** Gets the last output not printed immediately */
+ def lastOutput(): Seq[String]
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala b/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala
new file mode 100644
index 000000000..b3ac41c55
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala
@@ -0,0 +1,210 @@
+package dotty.tools
+package dotc
+package repl
+
+import java.io.{BufferedReader, File, FileReader, PrintWriter}
+import java.io.IOException
+import java.lang.{ClassLoader, System}
+import scala.concurrent.{Future, Await}
+import scala.concurrent.duration.Duration
+import reporting.Reporter
+import core._
+import Contexts._
+import annotation.tailrec
+import scala.concurrent.ExecutionContext.Implicits.global
+
+/** The interactive shell. It provides a read-eval-print loop around
+ * the Interpreter class.
+ * After instantiation, clients should call the `run` method.
+ *
+ * @author Moez A. Abdel-Gawad
+ * @author Lex Spoon
+ * @author Martin Odersky
+ */
+class InterpreterLoop(compiler: Compiler, config: REPL.Config)(implicit ctx: Context) {
+ import config._
+
+ val interpreter = compiler.asInstanceOf[Interpreter]
+
+ private var in = input(interpreter)
+
+ /** The context class loader at the time this object was created */
+ protected val originalClassLoader =
+ Thread.currentThread.getContextClassLoader
+
+ /** A reverse list of commands to replay if the user
+ * requests a :replay */
+ var replayCommandsRev: List[String] = Nil
+
+ /** A list of commands to replay if the user requests a :replay */
+ def replayCommands = replayCommandsRev.reverse
+
+ /** Record a command for replay should the user request a :replay */
+ def addReplay(cmd: String) =
+ replayCommandsRev = cmd :: replayCommandsRev
+
+ /** Close the interpreter */
+ def closeInterpreter()(implicit ctx: Context): Unit = {
+ ctx.reporter.flush()
+ Thread.currentThread.setContextClassLoader(originalClassLoader)
+ }
+
+ /** print a friendly help message */
+ def printHelp(): Unit = {
+ printWelcome()
+ output.println("Type :load followed by a filename to load a Scala file.")
+ output.println("Type :replay to reset execution and replay all previous commands.")
+ output.println("Type :quit to exit the interpreter.")
+ }
+
+ /** Print a welcome message */
+ def printWelcome(): Unit = {
+ output.println(s"Welcome to Scala$version " + " (" +
+ System.getProperty("java.vm.name") + ", Java " + System.getProperty("java.version") + ")." )
+ output.println("Type in expressions to have them evaluated.")
+ output.println("Type :help for more information.")
+ output.flush()
+ }
+
+ val gitHash = ManifestInfo.attributes.getOrElse("Git-Hash", "unknown")
+ val version = s".next (pre-alpha, git-hash: $gitHash)"
+
+ /** The main read-eval-print loop for the interpreter. It calls
+ * `command()` for each line of input.
+ */
+ @tailrec final def repl(line: String = in.readLine(prompt)): Unit =
+ if (line != null) {
+ val (keepGoing, finalLineOpt) = command(line)
+ if (keepGoing) {
+ finalLineOpt.foreach(addReplay)
+ output.flush()
+ repl()
+ }
+ }
+
+ /** interpret all lines from a specified file */
+ def interpretAllFrom(filename: String): Unit = {
+ import java.nio.file.{Files, Paths}
+ import scala.collection.JavaConversions._
+ try {
+ val lines = Files.readAllLines(Paths.get(filename)).mkString("\n")
+ output.println("Loading " + filename + "...")
+ output.flush
+ interpreter.interpret(lines)
+ } catch {
+ case _: IOException =>
+ output.println("Error opening file: " + filename)
+ }
+ }
+
+ /** create a new interpreter and replay all commands so far */
+ def replay(): Unit = {
+ for (cmd <- replayCommands) {
+ output.println("Replaying: " + cmd)
+ output.flush() // because maybe cmd will have its own output
+ command(cmd)
+ output.println
+ }
+ }
+
+  /** Run one command submitted by the user. Two values are returned:
+ * (1) whether to keep running, (2) the line to record for replay,
+ * if any. */
+ def command(line: String): (Boolean, Option[String]) = {
+ def withFile(command: String)(action: String => Unit): Unit = {
+ val spaceIdx = command.indexOf(' ')
+ if (spaceIdx <= 0) {
+ output.println("That command requires a filename to be specified.")
+ return
+ }
+ val filename = command.substring(spaceIdx).trim
+ if (!new File(filename).exists) {
+ output.println("That file does not exist")
+ return
+ }
+ action(filename)
+ }
+
+ val helpRegexp = ":h(e(l(p)?)?)?"
+ val quitRegexp = ":q(u(i(t)?)?)?"
+ val loadRegexp = ":l(o(a(d)?)?)?.*"
+ val replayRegexp = ":r(e(p(l(a(y)?)?)?)?)?.*"
+ val lastOutput = interpreter.lastOutput()
+
+ var shouldReplay: Option[String] = None
+
+ if (line.matches(helpRegexp))
+ printHelp()
+ else if (line.matches(quitRegexp))
+ return (false, None)
+ else if (line.matches(loadRegexp)) {
+ withFile(line)(f => {
+ interpretAllFrom(f)
+ shouldReplay = Some(line)
+ })
+ }
+ else if (line matches replayRegexp)
+ replay()
+ else if (line startsWith ":")
+ output.println("Unknown command. Type :help for help.")
+ else
+ shouldReplay = lastOutput match { // don't interpret twice
+ case Nil => interpretStartingWith(line)
+ case oldRes =>
+ oldRes foreach output.print
+ Some(line)
+ }
+
+ (true, shouldReplay)
+ }
+
+ def silentlyRun(cmds: List[String]): Unit = cmds.foreach { cmd =>
+ interpreter.beQuietDuring(interpreter.interpret(cmd))
+ }
+
+ def silentlyBind(values: Array[(String, Any)]): Unit = values.foreach { case (id, value) =>
+ interpreter.beQuietDuring(
+ interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value.asInstanceOf[AnyRef]))
+ }
+
+ /** Interpret expressions starting with the first line.
+ * Read lines until a complete compilation unit is available
+ * or until a syntax error has been seen. If a full unit is
+ * read, go ahead and interpret it. Return the full string
+ * to be recorded for replay, if any.
+ */
+ def interpretStartingWith(code: String): Option[String] =
+ interpreter.interpret(code) match {
+ case Interpreter.Success => Some(code)
+ case _ => None
+ }
+/*
+ def loadFiles(settings: Settings) {
+ settings match {
+ case settings: GenericRunnerSettings =>
+ for (filename <- settings.loadfiles.value) {
+ val cmd = ":load " + filename
+ command(cmd)
+ replayCommandsRev = cmd :: replayCommandsRev
+ output.println()
+ }
+ case _ =>
+ }
+ }
+*/
+ def run(): Reporter = {
+ // loadFiles(settings)
+ try {
+ if (!ctx.reporter.hasErrors) { // if there are already errors, no sense to continue
+ printWelcome()
+ silentlyRun(config.initialCommands)
+ silentlyBind(config.boundValues)
+ repl(in.readLine(prompt))
+ silentlyRun(config.cleanupCommands)
+ }
+ } finally {
+ closeInterpreter()
+ }
+ ctx.reporter
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/Main.scala b/compiler/src/dotty/tools/dotc/repl/Main.scala
new file mode 100644
index 000000000..48ed3e788
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/Main.scala
@@ -0,0 +1,28 @@
+package dotty.tools
+package dotc
+package repl
+
+/* This REPL was adapted from an old (2008-ish) version of the Scala
+ * REPL. The original version from which the adaptation was done is found in:
+ *
+ * https://github.com/odersky/legacy-svn-scala/tree/spoon
+ *
+ * The reason this version was picked instead of a more current one is that
+ * the older version is much smaller, therefore easier to port. It is also
+ * considerably less intertwined with nsc than later versions.
+ *
+ * There are a number of TODOs:
+ *
+ * - figure out why we can launch REPL only with `java`, not with `scala`.
+ * - make a doti command (urgent, easy)
+ * - create or port REPL tests (urgent, intermediate)
+ * - copy improvements of current Scala REPL wrt to this version
+ * (somewhat urgent, intermediate)
+ * - re-enable bindSettings (not urgent, easy, see TODO in InterpreterLoop.scala)
+ * - make string generation more functional (not urgent, easy)
+ * - better handling of ^C (not urgent, intermediate)
+ * - syntax highlighting (not urgent, intermediate)
+ * - integrate with presentation compiler for command completion (not urgent, hard)
+ */
+/** The main entry point of the REPL */
+object Main extends REPL
diff --git a/compiler/src/dotty/tools/dotc/repl/ManifestInfo.scala b/compiler/src/dotty/tools/dotc/repl/ManifestInfo.scala
new file mode 100644
index 000000000..206dccd67
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ManifestInfo.scala
@@ -0,0 +1,20 @@
+package dotty.tools.dotc.repl
+
+import java.net.JarURLConnection
+import scala.collection.JavaConversions._
+
+object ManifestInfo {
+
+ val attributes: Map[String, String] = {
+ for {
+ resourceUrl <- Option(getClass.getResource(getClass.getSimpleName + ".class"))
+ urlConnection = resourceUrl.openConnection() if urlConnection.isInstanceOf[JarURLConnection]
+ manifest <- Option(urlConnection.asInstanceOf[JarURLConnection].getManifest)
+ } yield {
+ manifest.getMainAttributes.foldLeft(Map[String, String]())(
+ (map, attribute) => map + (attribute._1.toString -> attribute._2.toString)
+ )
+ }
+ }.getOrElse(Map())
+
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/NewLinePrintWriter.scala b/compiler/src/dotty/tools/dotc/repl/NewLinePrintWriter.scala
new file mode 100644
index 000000000..8e36a0ae4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/NewLinePrintWriter.scala
@@ -0,0 +1,11 @@
+package dotty.tools
+package dotc
+package repl
+import java.io.{Writer, PrintWriter}
+
+class NewLinePrintWriter(out: Writer, autoFlush: Boolean)
+extends PrintWriter(out, autoFlush) {
+ def this(out: Writer) = this(out, false)
+ override def println(): Unit = { print("\n"); flush() }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/repl/REPL.scala b/compiler/src/dotty/tools/dotc/repl/REPL.scala
new file mode 100644
index 000000000..211e3c931
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/REPL.scala
@@ -0,0 +1,100 @@
+package dotty.tools
+package dotc
+package repl
+
+import core.Contexts.Context
+import reporting.Reporter
+import io.{AbstractFile, PlainFile, VirtualDirectory}
+import scala.reflect.io.{PlainDirectory, Directory}
+import java.io.{BufferedReader, File => JFile, FileReader, PrintWriter}
+import java.net.{URL, URLClassLoader}
+
+/** A driver which runs a read-eval-print loop instead of a batch
+ *  compilation. Usage:
+ *
+ *  > scala dotty.tools.dotc.repl.Main <options>
+ *
+ *  scala> "expressions to interpret"
+ *
+ *  ...
+ *
+ *  scala> :quit // exit the interpreter
+ */
+class REPL extends Driver {
+
+ lazy val config = new REPL.Config
+
+ override def setup(args: Array[String], rootCtx: Context): (List[String], Context) = {
+ val (strs, ctx) = super.setup(args, rootCtx)
+ (strs, config.context(ctx))
+ }
+
+ override def newCompiler(implicit ctx: Context): Compiler =
+ new repl.CompilingInterpreter(config.output, ctx, config.classLoader)
+
+ override def sourcesRequired = false
+
+ override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter = {
+ if (fileNames.isEmpty)
+ new InterpreterLoop(compiler, config).run()
+ else
+ ctx.error(s"don't now what to do with $fileNames%, %")
+ ctx.reporter
+ }
+}
+
+object REPL {
+ class Config {
+ val prompt = "scala> "
+ val continuationPrompt = " "
+ val version = ".next (pre-alpha)"
+
+ def context(ctx: Context): Context = ctx
+
+ /** The first interpreted commands always take a couple of seconds due to
+ * classloading. To bridge the gap, we warm up the interpreter by letting
+ * it interpret at least a dummy line while waiting for the first line of
+ * input to be entered.
+ */
+ val initialCommands: List[String] =
+ "val theAnswerToLifeInTheUniverseAndEverything = 21 * 2" :: Nil
+
+ /** Before exiting, the interpreter will also run the cleanup commands
+ * issued in the variable below. This is useful if your REPL creates
+ * things during its run that should be dealt with before shutdown.
+ */
+ val cleanupCommands: List[String] = Nil
+
+ /** Initial values in the REPL can also be bound from runtime. Override
+ * this variable in the following manner to bind a variable at the start
+ * of the REPL session:
+ *
+ * {{{
+ * override val boundValues = Array("exampleList" -> List(1, 1, 2, 3, 5))
+ * }}}
+ *
+ * This is useful if you've integrated the REPL as part of your project
+ * and already have objects available during runtime that you'd like to
+ * inspect.
+ */
+ val boundValues: Array[(String, Any)] = Array.empty[(String, Any)]
+
+    /** To pass a custom ClassLoader to the Dotty REPL, override this value */
+ val classLoader: Option[ClassLoader] = None
+
+ /** The default input reader */
+ def input(in: Interpreter)(implicit ctx: Context): InteractiveReader = {
+ val emacsShell = System.getProperty("env.emacs", "") != ""
+ //println("emacsShell="+emacsShell) //debug
+ if (emacsShell) new SimpleReader()
+ else InteractiveReader.createDefault(in)
+ }
+
+ /** The default output writer */
+ def output: PrintWriter = new NewLinePrintWriter(new ConsoleWriter, true)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/SimpleReader.scala b/compiler/src/dotty/tools/dotc/repl/SimpleReader.scala
new file mode 100644
index 000000000..5fab47bbe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/SimpleReader.scala
@@ -0,0 +1,24 @@
+package dotty.tools
+package dotc
+package repl
+
+import java.io.{BufferedReader, PrintWriter}
+import dotc.core.Contexts.Context
+
+
+/** Reads using standard JDK API */
+class SimpleReader(
+ in: BufferedReader,
+ out: PrintWriter,
+ val interactive: Boolean)
+extends InteractiveReader {
+ def this() = this(Console.in, new PrintWriter(Console.out), true)
+
+ def readLine(prompt: String) = {
+ if (interactive) {
+ out.print(prompt)
+ out.flush()
+ }
+ in.readLine()
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/Ansi.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/Ansi.scala
new file mode 100644
index 000000000..37c4de7b5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/Ansi.scala
@@ -0,0 +1,256 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+object Ansi {
+
+ /**
+ * Represents a single, atomic ANSI escape sequence that results in a
+ * color, background or decoration being added to the output.
+ *
+ * @param escape the actual ANSI escape sequence corresponding to this Attr
+ */
+ case class Attr private[Ansi](escape: Option[String], resetMask: Int, applyMask: Int) {
+ override def toString = escape.getOrElse("") + Console.RESET
+ def transform(state: Short) = ((state & ~resetMask) | applyMask).toShort
+
+ def matches(state: Short) = (state & resetMask) == applyMask
+ def apply(s: Ansi.Str) = s.overlay(this, 0, s.length)
+ }
+
+ object Attr {
+ val Reset = new Attr(Some(Console.RESET), Short.MaxValue, 0)
+
+ /**
+ * Quickly convert string-colors into [[Ansi.Attr]]s
+ */
+ val ParseMap = {
+ val pairs = for {
+ cat <- categories
+ color <- cat.all
+ str <- color.escape
+ } yield (str, color)
+ (pairs :+ (Console.RESET -> Reset)).toMap
+ }
+ }
+
+ /**
+ * Represents a set of [[Ansi.Attr]]s all occupying the same bit-space
+ * in the state `Short`
+ */
+ sealed abstract class Category() {
+ val mask: Int
+ val all: Seq[Attr]
+ lazy val bitsMap = all.map{ m => m.applyMask -> m}.toMap
+ def makeAttr(s: Option[String], applyMask: Int) = {
+ new Attr(s, mask, applyMask)
+ }
+ }
+
+ object Color extends Category {
+
+ val mask = 15 << 7
+ val Reset = makeAttr(Some("\u001b[39m"), 0 << 7)
+ val Black = makeAttr(Some(Console.BLACK), 1 << 7)
+ val Red = makeAttr(Some(Console.RED), 2 << 7)
+ val Green = makeAttr(Some(Console.GREEN), 3 << 7)
+ val Yellow = makeAttr(Some(Console.YELLOW), 4 << 7)
+ val Blue = makeAttr(Some(Console.BLUE), 5 << 7)
+ val Magenta = makeAttr(Some(Console.MAGENTA), 6 << 7)
+ val Cyan = makeAttr(Some(Console.CYAN), 7 << 7)
+ val White = makeAttr(Some(Console.WHITE), 8 << 7)
+
+ val all = Vector(
+ Reset, Black, Red, Green, Yellow,
+ Blue, Magenta, Cyan, White
+ )
+ }
+
+ object Back extends Category {
+ val mask = 15 << 3
+
+ val Reset = makeAttr(Some("\u001b[49m"), 0 << 3)
+ val Black = makeAttr(Some(Console.BLACK_B), 1 << 3)
+ val Red = makeAttr(Some(Console.RED_B), 2 << 3)
+ val Green = makeAttr(Some(Console.GREEN_B), 3 << 3)
+ val Yellow = makeAttr(Some(Console.YELLOW_B), 4 << 3)
+ val Blue = makeAttr(Some(Console.BLUE_B), 5 << 3)
+ val Magenta = makeAttr(Some(Console.MAGENTA_B), 6 << 3)
+ val Cyan = makeAttr(Some(Console.CYAN_B), 7 << 3)
+ val White = makeAttr(Some(Console.WHITE_B), 8 << 3)
+
+ val all = Seq(
+ Reset, Black, Red, Green, Yellow,
+ Blue, Magenta, Cyan, White
+ )
+ }
+
+ object Bold extends Category {
+ val mask = 1 << 0
+ val On = makeAttr(Some(Console.BOLD), 1 << 0)
+ val Off = makeAttr(None , 0 << 0)
+ val all = Seq(On, Off)
+ }
+
+ object Underlined extends Category {
+ val mask = 1 << 1
+ val On = makeAttr(Some(Console.UNDERLINED), 1 << 1)
+ val Off = makeAttr(None, 0 << 1)
+ val all = Seq(On, Off)
+ }
+
+ object Reversed extends Category {
+ val mask = 1 << 2
+ val On = makeAttr(Some(Console.REVERSED), 1 << 2)
+ val Off = makeAttr(None, 0 << 2)
+ val all = Seq(On, Off)
+ }
+
+ val hardOffMask = Bold.mask | Underlined.mask | Reversed.mask
+ val categories = List(Color, Back, Bold, Underlined, Reversed)
+
+ object Str {
+ @sharable lazy val ansiRegex = "\u001B\\[[;\\d]*m".r
+
+ implicit def parse(raw: CharSequence): Str = {
+ val chars = new Array[Char](raw.length)
+ val colors = new Array[Short](raw.length)
+ var currentIndex = 0
+ var currentColor = 0.toShort
+
+ val matches = ansiRegex.findAllMatchIn(raw)
+ val indices = Seq(0) ++ matches.flatMap { m => Seq(m.start, m.end) } ++ Seq(raw.length)
+
+ for {
+ Seq(start, end) <- indices.sliding(2).toSeq
+ if start != end
+ } {
+ val frag = raw.subSequence(start, end).toString
+ if (frag.charAt(0) == '\u001b' && Attr.ParseMap.contains(frag)) {
+ currentColor = Attr.ParseMap(frag).transform(currentColor)
+ } else {
+ var i = 0
+ while(i < frag.length){
+ chars(currentIndex) = frag(i)
+ colors(currentIndex) = currentColor
+ i += 1
+ currentIndex += 1
+ }
+ }
+ }
+
+ Str(chars.take(currentIndex), colors.take(currentIndex))
+ }
+ }
+
+ /**
+ * An [[Ansi.Str]]'s `color`s array is filled with shorts, each representing
+ * the ANSI state of one character encoded in its bits. Each [[Attr]] belongs
+ * to a [[Category]] that occupies a range of bits within each short:
+ *
+ * 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
+ * |-----------| |--------| |--------| | | |bold
+ * | | | | |reversed
+ * | | | |underlined
+ * | | |foreground-color
+ * | |background-color
+ * |unused
+ *
+ *
+ * The `0000 0000 0000 0000` short corresponds to plain text with no decoration
+ *
+ */
+ type State = Short
+
+ /**
+ * Encapsulates a string with associated ANSI colors and text decorations.
+ *
+ * Contains some basic string methods, as well as some ansi methods to e.g.
+ * apply particular colors or other decorations to particular sections of
+ * the [[Ansi.Str]]. [[render]] flattens it out into a `java.lang.String`
+ * with all the colors present as ANSI escapes.
+ *
+ */
+ case class Str private(chars: Array[Char], colors: Array[State]) {
+ require(chars.length == colors.length)
+
+ def ++(other: Str) = Str(chars ++ other.chars, colors ++ other.colors)
+ def splitAt(index: Int) = {
+ val (leftChars, rightChars) = chars.splitAt(index)
+ val (leftColors, rightColors) = colors.splitAt(index)
+ (new Str(leftChars, leftColors), new Str(rightChars, rightColors))
+ }
+
+ def length = chars.length
+ override def toString = render
+
+ def plainText = new String(chars.toArray)
+ def render = {
+ // Pre-size StringBuilder with approximate size (ansi colors tend
+ // to be about 5 chars long) to avoid re-allocations during growth
+ val output = new StringBuilder(chars.length + colors.length * 5)
+
+
+ var currentState = 0.toShort
+ /**
+ * Emit the ansi escapes necessary to transition
+ * between two states, if necessary.
+ */
+ def emitDiff(nextState: Short) = if (currentState != nextState){
+ // Any of these transitions from 1 to 0 within the hardOffMask
+ // categories cannot be done with a single ansi escape, and need
+ // you to emit a RESET followed by re-building whatever ansi state
+ // you previously had from scratch
+ if ((currentState & ~nextState & hardOffMask) != 0){
+ output.append(Console.RESET)
+ currentState = 0
+ }
+
+ var categoryIndex = 0
+ while(categoryIndex < categories.length){
+ val cat = categories(categoryIndex)
+ if ((cat.mask & currentState) != (cat.mask & nextState)){
+ val attr = cat.bitsMap(nextState & cat.mask)
+
+ if (attr.escape.isDefined) {
+ output.append(attr.escape.get)
+ }
+ }
+ categoryIndex += 1
+ }
+ }
+
+ var i = 0
+ while(i < colors.length){
+ // Emit ANSI escapes to change colors where necessary
+ emitDiff(colors(i))
+ currentState = colors(i)
+ output.append(chars(i))
+ i += 1
+ }
+
+ // Cap off the left-hand-side of the rendered string with any ansi escape
+ // codes necessary to reset the state to 0
+ emitDiff(0)
+ output.toString
+ }
+
+ /**
+ * Overlays the desired color over the specified range of the [[Ansi.Str]].
+ */
+ def overlay(overlayColor: Attr, start: Int, end: Int) = {
+ require(end >= start,
+ s"end:$end must be greater than start:$end in AnsiStr#overlay call"
+ )
+ val colorsOut = new Array[Short](colors.length)
+ var i = 0
+ while(i < colors.length){
+ if (i >= start && i < end) colorsOut(i) = overlayColor.transform(colors(i))
+ else colorsOut(i) = colors(i)
+ i += 1
+ }
+ new Str(chars, colorsOut)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/Filter.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/Filter.scala
new file mode 100644
index 000000000..9d34bb0f2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/Filter.scala
@@ -0,0 +1,61 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+object Filter {
+ def apply(id: String)(f: PartialFunction[TermInfo, TermAction]): Filter =
+ new Filter {
+ val op = f.lift
+ def identifier = id
+ }
+
+ def wrap(id: String)(f: TermInfo => Option[TermAction]): Filter =
+ new Filter {
+ val op = f
+ def identifier = id
+ }
+
+ /** Merges multiple [[Filter]]s into one. */
+ def merge(pfs: Filter*) = new Filter {
+ val op = (v1: TermInfo) => pfs.iterator.map(_.op(v1)).find(_.isDefined).flatten
+ def identifier = pfs.iterator.map(_.identifier).mkString(":")
+ }
+
+ val empty = Filter.merge()
+}
+
+/**
+ * The way you configure your terminal behavior; a trivial wrapper around a
+ * function, though you should provide a good `.toString` method to make
+ * debugging easier. The [[TermInfo]] and [[TermAction]] types are its
+ * interface to the terminal.
+ *
+ * [[Filter]]s are composed sequentially: if a filter returns `None` the next
+ * filter is tried, while if a filter returns `Some` that ends the cascade.
+ * While your `op` function interacts with the terminal purely through
+ * immutable case classes, the Filter itself is free to maintain its own state
+ * and mutate it whenever, even when returning `None` to continue the cascade.
+ */
+trait Filter {
+ val op: TermInfo => Option[TermAction]
+
+ /**
+ * the `.toString` of this object, except by making it separate we force
+ * the implementer to provide something and stop them from accidentally
+ * leaving it as the meaningless default.
+ */
+ def identifier: String
+ override def toString = identifier
+}
+
+/**
+ * A filter as an abstract class, letting you provide a [[filter]] instead of
+ * an `op`, automatically providing a good `.toString` for debugging, and
+ * providing a reasonable "place" inside the inheriting class/object to put
+ * state or helpers or other logic associated with the filter.
+ */
+abstract class DelegateFilter() extends Filter {
+ def filter: Filter
+ val op = filter.op
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala
new file mode 100644
index 000000000..c18b6a927
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala
@@ -0,0 +1,80 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+/**
+ * A collection of helpers to simplify the common case of building filters
+ */
+object FilterTools {
+ val ansiRegex = "\u001B\\[[;\\d]*."
+
+ def offsetIndex(buffer: Vector[Char], in: Int) = {
+ var splitIndex = 0
+ var length = 0
+
+ while(length < in) {
+ ansiRegex.r.findPrefixOf(buffer.drop(splitIndex)) match {
+ case None =>
+ splitIndex += 1
+ length += 1
+ case Some(s) =>
+ splitIndex += s.length
+ }
+ }
+ splitIndex
+ }
+
+ /**
+ * Shorthand to construct a filter in the common case where you're
+ * switching on the prefix of the input stream and want to run some
+ * transformation on the buffer/cursor
+ */
+ def Case(s: String)
+ (f: (Vector[Char], Int, TermInfo) => (Vector[Char], Int)) = new Filter {
+ val op = new PartialFunction[TermInfo, TermAction] {
+ def isDefinedAt(x: TermInfo) = {
+
+ def rec(i: Int, c: LazyList[Int]): Boolean = {
+ if (i >= s.length) true
+ else if (c.head == s(i)) rec(i + 1, c.tail)
+ else false
+ }
+ rec(0, x.ts.inputs)
+ }
+
+ def apply(v1: TermInfo) = {
+ val (buffer1, cursor1) = f(v1.ts.buffer, v1.ts.cursor, v1)
+ TermState(
+ v1.ts.inputs.dropPrefix(s.map(_.toInt)).get,
+ buffer1,
+ cursor1
+ )
+ }
+
+ }.lift
+ def identifier = "Case"
+ }
+
+ /** Shorthand for pattern matching on [[TermState]] */
+ val TS = TermState
+
+ def findChunks(b: Vector[Char], c: Int) = {
+ val chunks = Terminal.splitBuffer(b)
+ // The index of the first character in each chunk
+ val chunkStarts = chunks.inits.map(x => x.length + x.sum).toStream.reverse
+ // Index of the current chunk that contains the cursor
+ val chunkIndex = chunkStarts.indexWhere(_ > c) match {
+ case -1 => chunks.length-1
+ case x => x - 1
+ }
+ (chunks, chunkStarts, chunkIndex)
+ }
+
+ def firstRow(cursor: Int, buffer: Vector[Char], width: Int) =
+ cursor < width && (buffer.indexOf('\n') >= cursor || buffer.indexOf('\n') == -1)
+
+ def lastRow(cursor: Int, buffer: Vector[Char], width: Int) =
+ (buffer.length - cursor) < width &&
+ (buffer.lastIndexOf('\n') < cursor || buffer.lastIndexOf('\n') == -1)
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/LICENSE b/compiler/src/dotty/tools/dotc/repl/ammonite/LICENSE
new file mode 100644
index 000000000..b15103580
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/LICENSE
@@ -0,0 +1,25 @@
+License
+=======
+
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Li Haoyi (haoyi.sg@gmail.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/Protocol.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/Protocol.scala
new file mode 100644
index 000000000..34d31aeca
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/Protocol.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+case class TermInfo(ts: TermState, width: Int)
+
+sealed trait TermAction
+case class Printing(ts: TermState, stdout: String) extends TermAction
+case class TermState(
+ inputs: LazyList[Int],
+ buffer: Vector[Char],
+ cursor: Int,
+ msg: Ansi.Str = ""
+) extends TermAction
+
+object TermState {
+ def unapply(ti: TermInfo): Option[(LazyList[Int], Vector[Char], Int, Ansi.Str)] =
+ TermState.unapply(ti.ts)
+
+ def unapply(ti: TermAction): Option[(LazyList[Int], Vector[Char], Int, Ansi.Str)] =
+ ti match {
+ case ts: TermState => TermState.unapply(ts)
+ case _ => None
+ }
+}
+
+case class ClearScreen(ts: TermState) extends TermAction
+case object Exit extends TermAction
+case class Result(s: String) extends TermAction
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala
new file mode 100644
index 000000000..d834cc10b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala
@@ -0,0 +1,81 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+/**
+ * One place to assign all the esoteric control key input snippets to
+ * easy-to-remember names
+ */
+object SpecialKeys {
+
+ /**
+ * Lets you easily pattern match on characters modified by ctrl,
+ * or convert a character into its ctrl-ed version
+ */
+ object Ctrl {
+ def apply(c: Char) = (c - 96).toChar.toString
+ def unapply(i: Int): Option[Int] = Some(i + 96)
+ }
+
+ /**
+ * The string value you get when you hit the alt key
+ */
+ def Alt = "\u001b"
+
+
+ val Up = Alt+"[A"
+ val Down = Alt+"[B"
+ val Right = Alt+"[C"
+ val Left = Alt+"[D"
+
+ val Home = Alt+"OH"
+ val End = Alt+"OF"
+
+ // For some reason Screen makes these print different incantations
+ // from a normal snippet, so this causes issues like
+ // https://github.com/lihaoyi/Ammonite/issues/152 unless we special
+ // case them
+ val HomeScreen = Alt+"[1~"
+ val EndScreen = Alt+"[4~"
+
+ val ShiftUp = Alt+"[1;2A"
+ val ShiftDown = Alt+"[1;2B"
+ val ShiftRight = Alt+"[1;2C"
+ val ShiftLeft = Alt+"[1;2D"
+
+ val FnUp = Alt+"[5~"
+ val FnDown = Alt+"[6~"
+ val FnRight = Alt+"[F"
+ val FnLeft = Alt+"[H"
+
+ val AltUp = Alt*2+"[A"
+ val AltDown = Alt*2+"[B"
+ val AltRight = Alt*2+"[C"
+ val AltLeft = Alt*2+"[D"
+
+ val LinuxCtrlRight = Alt+"[1;5C"
+ val LinuxCtrlLeft = Alt+"[1;5D"
+
+ val FnAltUp = Alt*2+"[5~"
+ val FnAltDown = Alt*2+"[6~"
+ val FnAltRight = Alt+"[1;9F"
+ val FnAltLeft = Alt+"[1;9H"
+
+ // Same as fn-alt-{up, down}
+// val FnShiftUp = Alt*2+"[5~"
+// val FnShiftDown = Alt*2+"[6~"
+ val FnShiftRight = Alt+"[1;2F"
+ val FnShiftLeft = Alt+"[1;2H"
+
+ val AltShiftUp = Alt+"[1;10A"
+ val AltShiftDown = Alt+"[1;10B"
+ val AltShiftRight = Alt+"[1;10C"
+ val AltShiftLeft = Alt+"[1;10D"
+
+ // Same as fn-alt-{up, down}
+// val FnAltShiftUp = Alt*2+"[5~"
+// val FnAltShiftDown = Alt*2+"[6~"
+ val FnAltShiftRight = Alt+"[1;10F"
+ val FnAltShiftLeft = Alt+"[1;10H"
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/Terminal.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/Terminal.scala
new file mode 100644
index 000000000..4b18b38e3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/Terminal.scala
@@ -0,0 +1,320 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+
+import scala.annotation.tailrec
+import scala.collection.mutable
+
+/**
+ * The core logic around a terminal; it defines the base `filters` API
+ * through which anything (including basic cursor-navigation and typing)
+ * interacts with the terminal.
+ *
+ * Maintains basic invariants, such as "cursor should always be within
+ * the buffer", and "ansi terminal should reflect most up to date TermState"
+ */
+object Terminal {
+
+ /**
+ * Computes how tall a line of text is when wrapped at `width`.
+ *
+ * Even 0-character lines still take up one row!
+ *
+ * width = 2
+ * 0 -> 1
+ * 1 -> 1
+ * 2 -> 1
+ * 3 -> 2
+ * 4 -> 2
+ * 5 -> 3
+ */
+ def fragHeight(length: Int, width: Int) = math.max(1, (length - 1) / width + 1)
+
+ def splitBuffer(buffer: Vector[Char]) = {
+ val frags = mutable.Buffer.empty[Int]
+ frags.append(0)
+ for(c <- buffer){
+ if (c == '\n') frags.append(0)
+ else frags(frags.length - 1) = frags.last + 1
+ }
+ frags
+ }
+ def calculateHeight(buffer: Vector[Char],
+ width: Int,
+ prompt: String): Seq[Int] = {
+ val rowLengths = splitBuffer(buffer)
+
+ calculateHeight0(rowLengths, width - prompt.length)
+ }
+
+ /**
+ * Given a buffer with characters and newlines, calculates how high
+ * the buffer is and where the cursor goes inside of it.
+ */
+ def calculateHeight0(rowLengths: Seq[Int],
+ width: Int): Seq[Int] = {
+ val fragHeights =
+ rowLengths
+ .inits
+ .toVector
+ .reverse // We want shortest-to-longest, inits gives longest-to-shortest
+ .filter(_.nonEmpty) // Without the first empty prefix
+ .map{ x =>
+ fragHeight(
+ // If the frag barely fits on one line, give it
+ // an extra spot for the cursor on the next line
+ x.last + 1,
+ width
+ )
+ }
+// Debug("fragHeights " + fragHeights)
+ fragHeights
+ }
+
+ def positionCursor(cursor: Int,
+ rowLengths: Seq[Int],
+ fragHeights: Seq[Int],
+ width: Int) = {
+ var leftoverCursor = cursor
+ // Debug("leftoverCursor " + leftoverCursor)
+ var totalPreHeight = 0
+ var done = false
+ // Don't check if the cursor exceeds the last chunk, because
+ // even if it does there's nowhere else for it to go
+ for(i <- 0 until rowLengths.length -1 if !done) {
+ // length of frag and the '\n' after it
+ val delta = rowLengths(i) + 1
+ // Debug("delta " + delta)
+ val nextCursor = leftoverCursor - delta
+ if (nextCursor >= 0) {
+ // Debug("nextCursor " + nextCursor)
+ leftoverCursor = nextCursor
+ totalPreHeight += fragHeights(i)
+ }else done = true
+ }
+
+ val cursorY = totalPreHeight + leftoverCursor / width
+ val cursorX = leftoverCursor % width
+
+ (cursorY, cursorX)
+ }
+
+
+ type Action = (Vector[Char], Int) => (Vector[Char], Int)
+ type MsgAction = (Vector[Char], Int) => (Vector[Char], Int, String)
+
+
+ def noTransform(x: Vector[Char], i: Int) = (Ansi.Str.parse(x), i)
+ /**
+ * Blockingly reads a line from the given input stream and returns it.
+ *
+ * @param prompt The prompt to display when requesting input
+ * @param reader The input-stream where characters come in, e.g. System.in
+ * @param writer The output-stream where print-outs go, e.g. System.out
+ * @param filters A set of actions that can be taken depending on the input,
+ * @param displayTransform code to manipulate the display of the buffer and
+ * cursor, without actually changing the logical
+ * values inside them.
+ */
+ def readLine(prompt: Prompt,
+ reader: java.io.Reader,
+ writer: java.io.Writer,
+ filters: Filter,
+ displayTransform: (Vector[Char], Int) => (Ansi.Str, Int) = noTransform)
+ : Option[String] = {
+
+ /**
+ * Erases the previous line and re-draws it with the new buffer and
+ * cursor.
+ *
+ * Relies on `ups` to know how "tall" the previous line was, to go up
+ * and erase that many rows in the console. Performs a lot of horrific
+ * math all over the place, incredibly prone to off-by-ones, in order
+ * to at the end of the day position the cursor in the right spot.
+ */
+ def redrawLine(buffer: Ansi.Str,
+ cursor: Int,
+ ups: Int,
+ rowLengths: Seq[Int],
+ fullPrompt: Boolean = true,
+ newlinePrompt: Boolean = false) = {
+
+
+ // Enable this in certain cases (e.g. cursor near the value you are
+ // interested into) see what's going on with all the ansi screen-cursor
+ // movement
+ def debugDelay() = if (false){
+ Thread.sleep(200)
+ writer.flush()
+ }
+
+
+ val promptLine =
+ if (fullPrompt) prompt.full
+ else prompt.lastLine
+
+ val promptWidth = if(newlinePrompt) 0 else prompt.lastLine.length
+ val actualWidth = width - promptWidth
+
+ ansi.up(ups)
+ ansi.left(9999)
+ ansi.clearScreen(0)
+ writer.write(promptLine.toString)
+ if (newlinePrompt) writer.write("\n")
+
+ // I'm not sure why this is necessary, but it seems that without it, a
+ // cursor that "barely" overshoots the end of a line, at the end of the
+ // buffer, does not properly wrap and ends up dangling off the
+ // right-edge of the terminal window!
+ //
+ // This causes problems later since the cursor is at the wrong X/Y,
+ // confusing the rest of the math and ending up over-shooting on the
+ // `ansi.up` calls, over-writing earlier lines. This prints a single
+ // space such that instead of dangling it forces the cursor onto the
+ // next line for-realz. If it isn't dangling the extra space is a no-op
+ val lineStuffer = ' '
+ // Under `newlinePrompt`, we print the thing almost-verbatim, since we
+ // want to avoid breaking code by adding random indentation. If not, we
+ // are guaranteed that the lines are short, so we can indent the newlines
+ // without fear of wrapping
+ val newlineReplacement =
+ if (newlinePrompt) {
+
+ Array(lineStuffer, '\n')
+ } else {
+ val indent = " " * prompt.lastLine.length
+ Array('\n', indent:_*)
+ }
+
+ writer.write(
+ buffer.render.flatMap{
+ case '\n' => newlineReplacement
+ case x => Array(x)
+ }.toArray
+ )
+ writer.write(lineStuffer)
+
+ val fragHeights = calculateHeight0(rowLengths, actualWidth)
+ val (cursorY, cursorX) = positionCursor(
+ cursor,
+ rowLengths,
+ fragHeights,
+ actualWidth
+ )
+ ansi.up(fragHeights.sum - 1)
+ ansi.left(9999)
+ ansi.down(cursorY)
+ ansi.right(cursorX)
+ if (!newlinePrompt) ansi.right(prompt.lastLine.length)
+
+ writer.flush()
+ }
+
+ @tailrec
+ def readChar(lastState: TermState, ups: Int, fullPrompt: Boolean = true): Option[String] = {
+ val moreInputComing = reader.ready()
+
+ lazy val (transformedBuffer0, cursorOffset) = displayTransform(
+ lastState.buffer,
+ lastState.cursor
+ )
+
+ lazy val transformedBuffer = transformedBuffer0 ++ lastState.msg
+ lazy val lastOffsetCursor = lastState.cursor + cursorOffset
+ lazy val rowLengths = splitBuffer(
+ lastState.buffer ++ lastState.msg.plainText
+ )
+ val narrowWidth = width - prompt.lastLine.length
+ val newlinePrompt = rowLengths.exists(_ >= narrowWidth)
+ val promptWidth = if(newlinePrompt) 0 else prompt.lastLine.length
+ val actualWidth = width - promptWidth
+ val newlineUp = if (newlinePrompt) 1 else 0
+ if (!moreInputComing) redrawLine(
+ transformedBuffer,
+ lastOffsetCursor,
+ ups,
+ rowLengths,
+ fullPrompt,
+ newlinePrompt
+ )
+
+ lazy val (oldCursorY, _) = positionCursor(
+ lastOffsetCursor,
+ rowLengths,
+ calculateHeight0(rowLengths, actualWidth),
+ actualWidth
+ )
+
+ def updateState(s: LazyList[Int],
+ b: Vector[Char],
+ c: Int,
+ msg: Ansi.Str): (Int, TermState) = {
+
+ val newCursor = math.max(math.min(c, b.length), 0)
+ val nextUps =
+ if (moreInputComing) ups
+ else oldCursorY + newlineUp
+
+ val newState = TermState(s, b, newCursor, msg)
+
+ (nextUps, newState)
+ }
+ // `.get` because we assume that *some* filter is going to match each
+ // character, even if only to dump the character to the screen. If nobody
+ // matches the character then we can feel free to blow up
+ filters.op(TermInfo(lastState, actualWidth)).get match {
+ case Printing(TermState(s, b, c, msg), stdout) =>
+ writer.write(stdout)
+ val (nextUps, newState) = updateState(s, b, c, msg)
+ readChar(newState, nextUps)
+
+ case TermState(s, b, c, msg) =>
+ val (nextUps, newState) = updateState(s, b, c, msg)
+ readChar(newState, nextUps, false)
+
+ case Result(s) =>
+ redrawLine(
+ transformedBuffer, lastState.buffer.length,
+ oldCursorY + newlineUp, rowLengths, false, newlinePrompt
+ )
+ writer.write(10)
+ writer.write(13)
+ writer.flush()
+ Some(s)
+ case ClearScreen(ts) =>
+ ansi.clearScreen(2)
+ ansi.up(9999)
+ ansi.left(9999)
+ readChar(ts, ups)
+ case Exit =>
+ None
+ }
+ }
+
+ lazy val ansi = new AnsiNav(writer)
+ lazy val (width, _, initialConfig) = TTY.init()
+ try {
+ readChar(TermState(LazyList.continually(reader.read()), Vector.empty, 0, ""), 0)
+ }finally{
+
+ // Don't close these! Closing these closes stdin/stdout,
+ // which seems to kill the entire program
+
+ // reader.close()
+ // writer.close()
+ TTY.stty(initialConfig)
+ }
+ }
+}
+object Prompt {
+ implicit def construct(prompt: String): Prompt = {
+ val parsedPrompt = Ansi.Str.parse(prompt)
+ val index = parsedPrompt.plainText.lastIndexOf('\n')
+ val (_, last) = parsedPrompt.splitAt(index+1)
+ Prompt(parsedPrompt, last)
+ }
+}
+
+case class Prompt(full: Ansi.Str, lastLine: Ansi.Str)
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/Utils.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/Utils.scala
new file mode 100644
index 000000000..64a2c1476
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/Utils.scala
@@ -0,0 +1,169 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+import java.io.{FileOutputStream, Writer, File => JFile}
+import scala.annotation.tailrec
+
+/**
+ * Prints stuff to an ad-hoc logging file when running the repl or terminal in
+ * development mode
+ *
+ * Very handy for the common case where you're debugging terminal interactions
+ * and cannot use `println` because it will stomp all over your already messed
+ * up terminal state and block debugging. With [[Debug]], you can have a
+ * separate terminal open tailing the log file and log as verbosely as you
+ * want without affecting the primary terminal you're using to interact with
+ * Ammonite.
+ */
+object Debug {
+ lazy val debugOutput =
+ new FileOutputStream(new JFile("terminal/target/log"))
+
+ def apply(s: Any) =
+ if (System.getProperty("ammonite-sbt-build") == "true")
+ debugOutput.write((System.currentTimeMillis() + "\t\t" + s + "\n").getBytes)
+}
+
+class AnsiNav(output: Writer) {
+ def control(n: Int, c: Char) = output.write(s"\033[" + n + c)
+
+ /**
+ * Move up `n` squares
+ */
+ def up(n: Int) = if (n == 0) "" else control(n, 'A')
+ /**
+ * Move down `n` squares
+ */
+ def down(n: Int) = if (n == 0) "" else control(n, 'B')
+ /**
+ * Move right `n` squares
+ */
+ def right(n: Int) = if (n == 0) "" else control(n, 'C')
+ /**
+ * Move left `n` squares
+ */
+ def left(n: Int) = if (n == 0) "" else control(n, 'D')
+
+ /**
+ * Clear the screen
+ *
+ * n=0: clear from cursor to end of screen
+ * n=1: clear from cursor to start of screen
+ * n=2: clear entire screen
+ */
+ def clearScreen(n: Int) = control(n, 'J')
+ /**
+ * Clear the current line
+ *
+ * n=0: clear from cursor to end of line
+ * n=1: clear from cursor to start of line
+ * n=2: clear entire line
+ */
+ def clearLine(n: Int) = control(n, 'K')
+}
+
+object AnsiNav {
+ val resetUnderline = "\u001b[24m"
+ val resetForegroundColor = "\u001b[39m"
+ val resetBackgroundColor = "\u001b[49m"
+}
+
+object TTY {
+
+ // Prefer standard tools. Not sure why we need to do this, but for some
+ // reason the version installed by gnu-coreutils blows up sometimes giving
+ // "unable to perform all requested operations"
+ val pathedTput = if (new java.io.File("/usr/bin/tput").exists()) "/usr/bin/tput" else "tput"
+ val pathedStty = if (new java.io.File("/bin/stty").exists()) "/bin/stty" else "stty"
+
+ def consoleDim(s: String) = {
+ import sys.process._
+ Seq("bash", "-c", s"$pathedTput $s 2> /dev/tty").!!.trim.toInt
+ }
+ def init() = {
+ stty("-a")
+
+ val width = consoleDim("cols")
+ val height = consoleDim("lines")
+// Debug("Initializing, Width " + width)
+// Debug("Initializing, Height " + height)
+ val initialConfig = stty("-g").trim
+ stty("-icanon min 1 -icrnl -inlcr -ixon")
+ sttyFailTolerant("dsusp undef")
+ stty("-echo")
+ stty("intr undef")
+// Debug("")
+ (width, height, initialConfig)
+ }
+
+ private def sttyCmd(s: String) = {
+ import sys.process._
+ Seq("bash", "-c", s"$pathedStty $s < /dev/tty"): ProcessBuilder
+ }
+
+ def stty(s: String) =
+ sttyCmd(s).!!
+ /*
+ * Executes a stty command for which failure is expected, hence the return
+ * status can be non-null and errors are ignored.
+ * This is appropriate for `stty dsusp undef`, since it's unsupported on Linux
+ * (http://man7.org/linux/man-pages/man3/termios.3.html).
+ */
+ def sttyFailTolerant(s: String) =
+ sttyCmd(s ++ " 2> /dev/null").!
+
+ def restore(initialConfig: String) = {
+ stty(initialConfig)
+ }
+}
+
+/**
+ * A truly-lazy implementation of scala.Stream
+ */
+case class LazyList[T](headThunk: () => T, tailThunk: () => LazyList[T]) {
+ var rendered = false
+ lazy val head = {
+ rendered = true
+ headThunk()
+ }
+
+ lazy val tail = tailThunk()
+
+ def dropPrefix(prefix: Seq[T]) = {
+ @tailrec def rec(n: Int, l: LazyList[T]): Option[LazyList[T]] = {
+ if (n >= prefix.length) Some(l)
+ else if (prefix(n) == l.head) rec(n + 1, l.tail)
+ else None
+ }
+ rec(0, this)
+ }
+ override def toString = {
+
+ @tailrec def rec(l: LazyList[T], res: List[T]): List[T] = {
+ if (l.rendered) rec(l.tailThunk(), l.head :: res)
+ else res
+ }
+ s"LazyList(${(rec(this, Nil).reverse ++ Seq("...")).mkString(",")})"
+ }
+
+ def ~:(other: => T) = LazyList(() => other, () => this)
+}
+
+object LazyList {
+ object ~: {
+ def unapply[T](x: LazyList[T]) = Some((x.head, x.tail))
+ }
+
+ def continually[T](t: => T): LazyList[T] = LazyList(() => t, () =>continually(t))
+
+ implicit class CS(ctx: StringContext) {
+ val base = ctx.parts.mkString
+ object p {
+ def unapply(s: LazyList[Int]): Option[LazyList[Int]] = {
+ s.dropPrefix(base.map(_.toInt))
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala
new file mode 100644
index 000000000..faa97c348
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala
@@ -0,0 +1,163 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import ammonite.terminal.FilterTools._
+import ammonite.terminal.LazyList._
+import ammonite.terminal.SpecialKeys._
+import ammonite.terminal.Filter
+import ammonite.terminal._
+
+/**
+ * Filters for simple operation of a terminal: cursor-navigation
+ * (including with all the modifier keys), enter/ctrl-c-exit, etc.
+ */
+object BasicFilters {
+  def all = Filter.merge( // the standard set of basic terminal filters
+    navFilter,
+    exitFilter,
+    enterFilter,
+    clearFilter,
+    //loggingFilter,
+    typingFilter
+  )
+
+  def injectNewLine(b: Vector[Char], c: Int, rest: LazyList[Int], indent: Int = 0) = {
+    val (first, last) = b.splitAt(c) // split the buffer at the cursor
+    TermState(rest, (first :+ '\n') ++ last ++ Vector.fill(indent)(' '), c + 1 + indent) // '\n' at cursor, `indent` spaces appended at the end, cursor past both
+  }
+
+  def navFilter = Filter.merge( // arrow-key cursor movement
+    Case(Up)((b, c, m) => moveUp(b, c, m.width)),
+    Case(Down)((b, c, m) => moveDown(b, c, m.width)),
+    Case(Right)((b, c, m) => (b, c + 1)),
+    Case(Left)((b, c, m) => (b, c - 1))
+  )
+
+  def tabColumn(indent: Int, b: Vector[Char], c: Int, rest: LazyList[Int]) = {
+    val (chunks, chunkStarts, chunkIndex) = FilterTools.findChunks(b, c)
+    val chunkCol = c - chunkStarts(chunkIndex) // column within the current chunk (line)
+    val spacesToInject = indent - (chunkCol % indent) // pad out to the next multiple of `indent`
+    val (lhs, rhs) = b.splitAt(c)
+    TS(rest, lhs ++ Vector.fill(spacesToInject)(' ') ++ rhs, c + spacesToInject)
+  }
+
+  def tabFilter(indent: Int): Filter = Filter("tabFilter") {
+    case TS(9 ~: rest, b, c, _) => tabColumn(indent, b, c, rest) // 9 == Tab
+  }
+
+  def loggingFilter: Filter = Filter("loggingFilter") { // debug aid: echoes raw key codes until Ctrl-C
+    case TS(Ctrl('q') ~: rest, b, c, _) =>
+      println("Char Display Mode Enabled! Ctrl-C to exit")
+      var curr = rest
+      while (curr.head != 3) { // 3 == Ctrl-C
+        println("Char " + curr.head)
+        curr = curr.tail
+      }
+      TS(curr, b, c)
+  }
+
+  def typingFilter: Filter = Filter("typingFilter") {
+    case TS(p"\u001b[3~$rest", b, c, _) => // CSI 3~ == Delete key
+//      Debug("fn-delete")
+      val (first, last) = b.splitAt(c)
+      TS(rest, first ++ last.drop(1), c) // remove the char under the cursor
+
+    case TS(127 ~: rest, b, c, _) => // Backspace
+      val (first, last) = b.splitAt(c)
+      TS(rest, first.dropRight(1) ++ last, c - 1) // remove the char left of the cursor
+
+    case TS(char ~: rest, b, c, _) =>
+//      Debug("NORMAL CHAR " + char)
+      val (first, last) = b.splitAt(c)
+      TS(rest, (first :+ char.toChar) ++ last, c + 1) // insert the typed char at the cursor
+  }
+
+  def doEnter(b: Vector[Char], c: Int, rest: LazyList[Int]) = {
+    val (chunks, chunkStarts, chunkIndex) = FilterTools.findChunks(b, c)
+    if (chunkIndex == chunks.length - 1) Result(b.mkString) // cursor on the last chunk: submit the input
+    else injectNewLine(b, c, rest) // otherwise insert a newline mid-input
+  }
+
+  def enterFilter: Filter = Filter("enterFilter") {
+    case TS(13 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+    case TS(10 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+    case TS(10 ~: 13 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+    case TS(13 ~: 10 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+  }
+
+  def exitFilter: Filter = Filter("exitFilter") {
+    case TS(Ctrl('c') ~: rest, b, c, _) =>
+      Result("") // Ctrl-C: abandon the current line, return empty input
+    case TS(Ctrl('d') ~: rest, b, c, _) =>
+      // only exit if the line is empty, otherwise, behave like
+      // "delete" (i.e. delete one char to the right)
+      if (b.isEmpty) Exit else {
+        val (first, last) = b.splitAt(c)
+        TS(rest, first ++ last.drop(1), c)
+      }
+    case TS(-1 ~: rest, b, c, _) => Exit // java.io.Reader.read() produces -1 on EOF
+  }
+
+  def clearFilter: Filter = Filter("clearFilter") {
+    case TS(Ctrl('l') ~: rest, b, c, _) => ClearScreen(TS(rest, b, c))
+  }
+
+  def moveStart(b: Vector[Char], c: Int, w: Int) = { // cursor to start of the current visual row (width w)
+    val (_, chunkStarts, chunkIndex) = findChunks(b, c)
+    val currentColumn = (c - chunkStarts(chunkIndex)) % w
+    b -> (c - currentColumn)
+  }
+
+  def moveEnd(b: Vector[Char], c: Int, w: Int) = { // cursor to end of the current visual row
+    val (chunks, chunkStarts, chunkIndex) = findChunks(b, c)
+    val currentColumn = (c - chunkStarts(chunkIndex)) % w
+    val c1 = chunks.lift(chunkIndex + 1) match {
+      case Some(next) =>
+        val boundary = chunkStarts(chunkIndex + 1) - 1 // last position of the current chunk
+        if ((boundary - c) > (w - currentColumn)) { // row wraps before the chunk ends: stop at row end
+          val delta= w - currentColumn
+          c + delta
+        }
+        else boundary
+      case None =>
+        c + 1 * 9999 // no next chunk: overshoot far right (presumably clamped downstream — TODO confirm)
+    }
+    b -> c1
+  }
+
+  def moveUpDown(
+    b: Vector[Char],
+    c: Int,
+    w: Int,
+    boundaryOffset: Int, // 0 for up (current chunk start), 1 for down (next chunk start)
+    nextChunkOffset: Int, // -1 for up, +1 for down
+    checkRes: Int, // the naive result of moving one visual row: c -/+ w
+    check: (Int, Int) => Boolean, // accepts checkRes when the move stays inside the current chunk
+    isDown: Boolean
+  ) = {
+    val (chunks, chunkStarts, chunkIndex) = findChunks(b, c)
+    val offset = chunkStarts(chunkIndex + boundaryOffset)
+    if (check(checkRes, offset)) checkRes // simple +/- w move within the same chunk
+    else chunks.lift(chunkIndex + nextChunkOffset) match {
+      case None => c + nextChunkOffset * 9999 // no adjacent chunk: overshoot far in that direction
+      case Some(next) =>
+        val boundary = chunkStarts(chunkIndex + boundaryOffset)
+        val currentColumn = (c - chunkStarts(chunkIndex)) % w
+
+        if (isDown) boundary + math.min(currentColumn, next) // keep column, clamp to next chunk's length
+        else boundary + math.min(currentColumn - next % w, 0) - 1
+    }
+  }
+
+  def moveUp(b: Vector[Char], c: Int, w: Int) = {
+    b -> moveUpDown(b, c, w, 0, -1, c - w, _ > _, false)
+  }
+
+  def moveDown(b: Vector[Char], c: Int, w: Int) = {
+    b -> moveUpDown(b, c, w, 1, 1, c + w, _ <= _, true)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala
new file mode 100644
index 000000000..69a9769c6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala
@@ -0,0 +1,170 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.LazyList.~:
+import terminal.SpecialKeys._
+import terminal.DelegateFilter
+import terminal._
+
+/**
+ * Filters that hook into the various {Ctrl,Shift,Fn,Alt}x{Up,Down,Left,Right}
+ * combination keys, and make them behave similarly as they would on a normal
+ * GUI text editor: alt-{left, right} for word movement, hold-down-shift for
+ * text selection, etc.
+ */
+object GUILikeFilters {
+  case class SelectionFilter(indent: Int) extends DelegateFilter {
+    def identifier = "SelectionFilter"
+    var mark: Option[Int] = None // anchor of the current selection; None = no active selection
+
+    def setMark(c: Int) = {
+      Debug("setMark\t" + mark + "\t->\t" + c)
+      if (mark == None) mark = Some(c) // only set the anchor once per selection
+    }
+
+    def doIndent(
+      b: Vector[Char],
+      c: Int,
+      rest: LazyList[Int],
+      slicer: Vector[Char] => Int // positive n: prepend n spaces; negative n: drop -n leading chars
+    ) = {
+
+      val markValue = mark.get // only called from cases guarded by mark.isDefined
+      val (chunks, chunkStarts, chunkIndex) = FilterTools.findChunks(b, c)
+      val min = chunkStarts.lastIndexWhere(_ <= math.min(c, markValue))
+      val max = chunkStarts.indexWhere(_ > math.max(c, markValue))
+      val splitPoints = chunkStarts.slice(min, max) // chunk boundaries covered by the selection
+      val frags = (0 +: splitPoints :+ 99999).sliding(2).zipWithIndex
+
+      var firstOffset = 0
+      val broken =
+        for((Seq(l, r), i) <- frags) yield {
+          val slice = b.slice(l, r)
+          if (i == 0) slice // fragment before the selection is untouched
+          else {
+            val cut = slicer(slice)
+
+            if (i == 1) firstOffset = cut // size change of the first affected fragment
+
+            if (cut < 0) slice.drop(-cut) // dedent: drop leading chars
+            else Vector.fill(cut)(' ') ++ slice // indent: prepend spaces
+          }
+        }
+      val flattened = broken.flatten.toVector
+      val deeperOffset = flattened.length - b.length // net size change over all fragments
+
+      val (newMark, newC) =
+        if (mark.get > c) (mark.get + deeperOffset, c + firstOffset)
+        else (mark.get + firstOffset, c + deeperOffset)
+
+      mark = Some(newMark)
+      TS(rest, flattened, newC)
+    }
+
+    def filter = Filter.merge(
+
+      Case(ShiftUp) {(b, c, m) => setMark(c); BasicFilters.moveUp(b, c, m.width)},
+      Case(ShiftDown) {(b, c, m) => setMark(c); BasicFilters.moveDown(b, c, m.width)},
+      Case(ShiftRight) {(b, c, m) => setMark(c); (b, c + 1)},
+      Case(ShiftLeft) {(b, c, m) => setMark(c); (b, c - 1)},
+      Case(AltShiftUp) {(b, c, m) => setMark(c); BasicFilters.moveUp(b, c, m.width)},
+      Case(AltShiftDown) {(b, c, m) => setMark(c); BasicFilters.moveDown(b, c, m.width)},
+      Case(AltShiftRight) {(b, c, m) => setMark(c); wordRight(b, c)},
+      Case(AltShiftLeft) {(b, c, m) => setMark(c); wordLeft(b, c)},
+      Case(FnShiftRight) {(b, c, m) => setMark(c); BasicFilters.moveEnd(b, c, m.width)},
+      Case(FnShiftLeft) {(b, c, m) => setMark(c); BasicFilters.moveStart(b, c, m.width)},
+      Filter("fnOtherFilter") {
+        case TS(27 ~: 91 ~: 90 ~: rest, b, c, _) if mark.isDefined => // ESC [ Z (Shift-Tab): dedent selection
+          doIndent(b, c, rest,
+            slice => -math.min(slice.iterator.takeWhile(_ == ' ').size, indent)
+          )
+
+        case TS(9 ~: rest, b, c, _) if mark.isDefined => // Tab: indent selection
+          doIndent(b, c, rest,
+            slice => indent
+          )
+
+        // Intercept every other character.
+        case TS(char ~: inputs, buffer, cursor, _) if mark.isDefined =>
+          // If it's a special command, just cancel the current selection.
+          if (char.toChar.isControl &&
+             char != 127 /*backspace*/ &&
+             char != 13 /*enter*/ &&
+             char != 10 /*enter*/) {
+            mark = None
+            TS(char ~: inputs, buffer, cursor) // re-queue the char so other filters handle it
+          } else {
+            // If it's a printable character, delete the current
+            // selection and write the printable character.
+            val Seq(min, max) = Seq(mark.get, cursor).sorted
+            mark = None
+            val newBuffer = buffer.take(min) ++ buffer.drop(max)
+            val newInputs =
+              if (char == 127) inputs // backspace: just delete the selection
+              else char ~: inputs
+            TS(newInputs, newBuffer, min)
+          }
+      }
+    )
+  }
+
+  object SelectionFilter {
+    def mangleBuffer(
+      selectionFilter: SelectionFilter,
+      string: Ansi.Str,
+      cursor: Int,
+      startColor: Ansi.Attr // attribute used to highlight the selected range
+    ) = {
+      selectionFilter.mark match {
+        case Some(mark) if mark != cursor => // highlight [min, max) between mark and cursor
+          val Seq(min, max) = Seq(cursor, mark).sorted
+          val displayOffset = if (cursor < mark) 0 else -1
+          val newStr = string.overlay(startColor, min, max)
+          (newStr, displayOffset)
+        case _ => (string, 0)
+      }
+    }
+  }
+
+  val fnFilter = Filter.merge(
+    Case(FnUp)((b, c, m) => (b, c - 9999)), // far left/up — presumably clamped downstream
+    Case(FnDown)((b, c, m) => (b, c + 9999)), // far right/down — presumably clamped downstream
+    Case(FnRight)((b, c, m) => BasicFilters.moveEnd(b, c, m.width)),
+    Case(FnLeft)((b, c, m) => BasicFilters.moveStart(b, c, m.width))
+  )
+  val altFilter = Filter.merge(
+    Case(AltUp) {(b, c, m) => BasicFilters.moveUp(b, c, m.width)},
+    Case(AltDown) {(b, c, m) => BasicFilters.moveDown(b, c, m.width)},
+    Case(AltRight) {(b, c, m) => wordRight(b, c)},
+    Case(AltLeft) {(b, c, m) => wordLeft(b, c)}
+  )
+
+  val fnAltFilter = Filter.merge( // consumed as no-ops so the escape sequences don't leak into the buffer
+    Case(FnAltUp) {(b, c, m) => (b, c)},
+    Case(FnAltDown) {(b, c, m) => (b, c)},
+    Case(FnAltRight) {(b, c, m) => (b, c)},
+    Case(FnAltLeft) {(b, c, m) => (b, c)}
+  )
+  val fnAltShiftFilter = Filter.merge( // likewise no-ops
+    Case(FnAltShiftRight) {(b, c, m) => (b, c)},
+    Case(FnAltShiftLeft) {(b, c, m) => (b, c)}
+  )
+
+
+  def consumeWord(b: Vector[Char], c: Int, delta: Int, offset: Int) = { // skip non-word chars, then the word, in direction `delta`
+    var current = c
+    while(b.isDefinedAt(current) && !b(current).isLetterOrDigit) current += delta
+    while(b.isDefinedAt(current) && b(current).isLetterOrDigit) current += delta
+    current + offset
+  }
+
+  // c -1 to move at least one character! Otherwise you get stuck at the start of
+  // a word.
+  def wordLeft(b: Vector[Char], c: Int) = b -> consumeWord(b, c - 1, -1, 1)
+  def wordRight(b: Vector[Char], c: Int) = b -> consumeWord(b, c, 1, 0)
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala
new file mode 100644
index 000000000..dac1c9d23
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala
@@ -0,0 +1,334 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.LazyList._
+import terminal._
+
+/**
+ * Provides history navigation up and down, saving the current line, as well
+ * as history-search functionality (`Ctrl R` in bash) letting you quickly find
+ * & filter previous commands by entering a sub-string.
+ */
+class HistoryFilter(
+  history: () => IndexedSeq[String],
+  commentStartColor: String, // ANSI prefix used to color status messages
+  commentEndColor: String // ANSI suffix closing the status-message color
+) extends DelegateFilter {
+
+
+  def identifier = "HistoryFilter"
+  /**
+   * `-1` means we haven't started looking at history, `n >= 0` means we're
+   * currently at history command `n`
+   */
+  var historyIndex = -1
+
+  /**
+   * The term we're searching for, if any.
+   *
+   * - `None` means we're not searching for anything, e.g. we're just
+   *   browsing history
+   *
+   * - `Some(term)` where `term` is not empty is what it normally looks
+   *   like when we're searching for something
+   *
+   * - `Some(term)` where `term` is empty only really happens when you
+   *   start searching and delete things, or if you `Ctrl-R` on an empty
+   *   prompt
+   */
+  var searchTerm: Option[Vector[Char]] = None
+
+  /**
+   * Records the last buffer that the filter has observed while it's in
+   * search/history mode. If the new buffer differs from this, assume that
+   * some other filter modified the buffer and drop out of search/history
+   */
+  var prevBuffer: Option[Vector[Char]] = None
+
+  /**
+   * Kicks the HistoryFilter from passive-mode into search-history mode
+   */
+  def startHistory(b: Vector[Char], c: Int): (Vector[Char], Int, String) = {
+    if (b.nonEmpty) searchTerm = Some(b) // a non-empty buffer becomes the initial search term
+    up(Vector(), c)
+  }
+
+  def searchHistory(
+    start: Int, // history index to start searching from
+    increment: Int, // +1 walks to older entries, -1 to newer ones
+    buffer: Vector[Char],
+    skipped: Vector[Char] // entry to skip, e.g. the one currently displayed
+  ) = {
+
+    def nextHistoryIndexFor(v: Vector[Char]) = {
+      HistoryFilter.findNewHistoryIndex(start, v, history(), increment, skipped)
+    }
+
+    val (newHistoryIndex, newBuffer, newMsg, newCursor) = searchTerm match {
+      // We're not searching for anything, just browsing history.
+      // Pass in Vector.empty so we scroll through all items
+      case None =>
+        val (i, b, c) = nextHistoryIndexFor(Vector.empty)
+        (i, b, "", 99999)
+
+      // We're searching for some item with a particular search term
+      case Some(b) if b.nonEmpty =>
+        val (i, b1, c) = nextHistoryIndexFor(b)
+
+        val msg =
+          if (i.nonEmpty) ""
+          else commentStartColor + HistoryFilter.cannotFindSearchMessage + commentEndColor
+
+        (i, b1, msg, c)
+
+      // We're searching for nothing in particular; in this case,
+      // show a help message instead of an unhelpful, empty buffer
+      case Some(b) if b.isEmpty =>
+        val msg = commentStartColor + HistoryFilter.emptySearchMessage + commentEndColor
+        // The cursor in this case always goes to zero
+        (Some(start), Vector(), msg, 0)
+
+    }
+
+    historyIndex = newHistoryIndex.getOrElse(-1)
+
+    (newBuffer, newCursor, newMsg)
+  }
+
+  def activeHistory = searchTerm.nonEmpty || historyIndex != -1
+  def activeSearch = searchTerm.nonEmpty
+
+  def up(b: Vector[Char], c: Int) =
+    searchHistory(historyIndex + 1, 1, b, b)
+
+  def down(b: Vector[Char], c: Int) =
+    searchHistory(historyIndex - 1, -1, b, b)
+
+  def wrap(rest: LazyList[Int], out: (Vector[Char], Int, String)) =
+    TS(rest, out._1, out._2, out._3) // pack a (buffer, cursor, msg) triple into a TermState
+
+  def ctrlR(b: Vector[Char], c: Int) =
+    if (activeSearch) up(b, c) // already searching: advance to the next match
+    else {
+      searchTerm = Some(b)
+      up(Vector(), c)
+    }
+
+  def printableChar(char: Char)(b: Vector[Char], c: Int) = {
+    searchTerm = searchTerm.map(_ :+ char) // grow the search term by the typed char
+    searchHistory(historyIndex.max(0), 1, b :+ char, Vector())
+  }
+
+  def backspace(b: Vector[Char], c: Int) = {
+    searchTerm = searchTerm.map(_.dropRight(1)) // shrink the search term
+    searchHistory(historyIndex, 1, b, Vector())
+  }
+
+  /**
+   * Predicate to check if either we're searching for a term or if we're in
+   * history-browsing mode and some predicate is true.
+   *
+   * Very often we want to capture keystrokes in search-mode more aggressively
+   * than in history-mode, e.g. search-mode drops you out more aggressively
+   * than history-mode does, and its up/down keys cycle through history more
+   * aggressively on every keystroke while history-mode only cycles when you
+   * reach the top/bottom line of the multi-line input.
+   */
+  def searchOrHistoryAnd(cond: Boolean) =
+    activeSearch || (activeHistory && cond)
+
+  val dropHistoryChars = Set(9, 13, 10) // Tab, CR and LF: chars that drop you out of history mode
+
+  def endHistory() = {
+    historyIndex = -1
+    searchTerm = None
+  }
+
+  def filter = Filter.wrap("historyFilterWrap1") {
+    (ti: TermInfo) => {
+      prelude.op(ti) match {
+        case None => // prelude didn't fire: record the buffer and run the main filter
+          prevBuffer = Some(ti.ts.buffer)
+          filter0.op(ti) match {
+            case Some(ts: TermState) =>
+              prevBuffer = Some(ts.buffer) // track the buffer we produced for the next round
+              Some(ts)
+            case x => x
+          }
+        case some => some
+      }
+    }
+  }
+
+  def prelude: Filter = Filter("historyPrelude") {
+    case TS(inputs, b, c, _) if activeHistory && prevBuffer.exists(_ != b) =>
+      endHistory() // another filter changed the buffer: drop out of history mode
+      prevBuffer = None
+      TS(inputs, b, c)
+  }
+
+  def filter0: Filter = Filter("filter0") {
+    // Ways to kick off the history/search if you're not already in it
+
+    // `Ctrl-R`
+    case TS(18 ~: rest, b, c, _) => wrap(rest, ctrlR(b, c))
+
+    // `Up` from the first line in the input
+    case TermInfo(TS(p"\u001b[A$rest", b, c, _), w) if firstRow(c, b, w) && !activeHistory =>
+      wrap(rest, startHistory(b, c))
+
+    // `Ctrl P`
+    case TermInfo(TS(p"\u0010$rest", b, c, _), w) if firstRow(c, b, w) && !activeHistory =>
+      wrap(rest, startHistory(b, c))
+
+    // `Page-Up` from first character starts history
+    case TermInfo(TS(p"\u001b[5~$rest", b, c, _), w) if c == 0 && !activeHistory =>
+      wrap(rest, startHistory(b, c))
+
+    // Things you can do when you're already in the history search
+
+    // Navigating up and down the history. Each up or down searches for
+    // the next thing that matches your current searchTerm
+    // Up
+    case TermInfo(TS(p"\u001b[A$rest", b, c, _), w) if searchOrHistoryAnd(firstRow(c, b, w)) =>
+      wrap(rest, up(b, c))
+
+    // Ctrl P
+    case TermInfo(TS(p"\u0010$rest", b, c, _), w) if searchOrHistoryAnd(firstRow(c, b, w)) =>
+      wrap(rest, up(b, c))
+
+    // `Page-Up` from first character cycles history up
+    case TermInfo(TS(p"\u001b[5~$rest", b, c, _), w) if searchOrHistoryAnd(c == 0) =>
+      wrap(rest, up(b, c))
+
+    // Down
+    case TermInfo(TS(p"\u001b[B$rest", b, c, _), w) if searchOrHistoryAnd(lastRow(c, b, w)) =>
+      wrap(rest, down(b, c))
+
+    // `Ctrl N`
+
+    case TermInfo(TS(p"\u000e$rest", b, c, _), w) if searchOrHistoryAnd(lastRow(c, b, w)) =>
+      wrap(rest, down(b, c))
+    // `Page-Down` from last character cycles history down
+    case TermInfo(TS(p"\u001b[6~$rest", b, c, _), w) if searchOrHistoryAnd(c == b.length - 1) =>
+      wrap(rest, down(b, c))
+
+
+    // Intercept Backspace and delete a character in search-mode, preserving it, but
+    // letting it fall through and dropping you out of history-mode if you try to make
+    // edits
+    case TS(127 ~: rest, buffer, cursor, _) if activeSearch =>
+      wrap(rest, backspace(buffer, cursor))
+
+    // Any other control characters drop you out of search mode, but only the
+    // set of `dropHistoryChars` drops you out of history mode
+    case TS(char ~: inputs, buffer, cursor, _)
+        if char.toChar.isControl && searchOrHistoryAnd(dropHistoryChars(char)) =>
+      val newBuffer =
+        // If we're back to -1, it means we've wrapped around and are
+        // displaying the original search term with a wrap-around message
+        // in the terminal. Drop the message and just preserve the search term
+        if (historyIndex == -1) searchTerm.get
+        // If we're searching for an empty string, special-case this and return
+        // an empty buffer rather than the first history item (which would be
+        // the default) because that wouldn't make much sense
+        else if (searchTerm.exists(_.isEmpty)) Vector()
+        // Otherwise, pick whatever history entry we're at and use that
+        else history()(historyIndex).toVector
+      endHistory()
+
+      TS(char ~: inputs, newBuffer, cursor) // re-queue the char so downstream filters handle it
+
+    // Intercept every other printable character when search is on and
+    // enter it into the current search
+    case TS(char ~: rest, buffer, cursor, _) if activeSearch =>
+      wrap(rest, printableChar(char.toChar)(buffer, cursor))
+
+    // If you're not in search but are in history, entering any printable
+    // characters kicks you out of it and preserves the current buffer. This
+    // makes it harder for you to accidentally lose work due to history-moves
+    case TS(char ~: rest, buffer, cursor, _) if activeHistory && !char.toChar.isControl =>
+      historyIndex = -1
+      TS(char ~: rest, buffer, cursor)
+  }
+}
+
+object HistoryFilter {
+
+  def mangleBuffer(
+    historyFilter: HistoryFilter,
+    buffer: Ansi.Str,
+    cursor: Int,
+    startColor: Ansi.Attr // attribute used to highlight the matched search term
+  ) = {
+    if (!historyFilter.activeSearch) buffer
+    else {
+      val (searchStart, searchEnd) =
+        if (historyFilter.searchTerm.get.isEmpty) (cursor, cursor+1) // empty term: just highlight the cursor cell
+        else {
+          val start = buffer.plainText.indexOfSlice(historyFilter.searchTerm.get)
+
+          val end = start + (historyFilter.searchTerm.get.length max 1)
+          (start, end)
+        }
+
+      val newStr = buffer.overlay(startColor, searchStart, searchEnd)
+      newStr
+    }
+  }
+
+  /**
+   * @param startIndex The first index to start looking from
+   * @param searchTerm The term we're searching from; can be empty
+   * @param history The history we're searching through
+   * @param indexIncrement Which direction to search, +1 or -1
+   * @param skipped Any buffers which we should skip in our search results,
+   *                e.g. because the user has seen them before.
+   */
+  def findNewHistoryIndex(
+    startIndex: Int,
+    searchTerm: Vector[Char],
+    history: IndexedSeq[String],
+    indexIncrement: Int,
+    skipped: Vector[Char]
+  ) = {
+    /**
+     * `Some(i)` means we found a reasonable result at history element `i`
+     * `None` means we couldn't find anything, and should show a not-found
+     * error to the user
+     */
+    def rec(i: Int): Option[Int] = history.lift(i) match {
+      // If i < 0, it means the user is pressing `down` too many times, which
+      // means it doesn't show anything but we shouldn't show an error
+      case None if i < 0 => Some(-1)
+      case None => None
+      case Some(s) if s.contains(searchTerm) && !s.contentEquals(skipped) => // NOTE(review): relies on Seq[Char]=>CharSequence implicit for a substring test — confirm
+        Some(i)
+      case _ => rec(i + indexIncrement)
+    }
+
+    val newHistoryIndex = rec(startIndex)
+    val foundIndex = newHistoryIndex.find(_ != -1)
+    val newBuffer = foundIndex match {
+      case None => searchTerm // nothing found: keep showing what the user typed
+      case Some(i) => history(i).toVector
+    }
+
+    val newCursor = foundIndex match {
+      case None => newBuffer.length
+      case Some(i) => history(i).indexOfSlice(searchTerm) + searchTerm.length // cursor just after the match
+    }
+
+    (newHistoryIndex, newBuffer, newCursor)
+  }
+
+  val emptySearchMessage =
+    s" ...enter the string to search for, then `up` for more"
+  val cannotFindSearchMessage =
+    s" ...can't be found in history; re-starting search"
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala
new file mode 100644
index 000000000..eb79f2b04
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala
@@ -0,0 +1,165 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.SpecialKeys._
+import terminal.{DelegateFilter, Filter, Terminal}
+/**
+ * Filters for injection of readline-specific hotkeys, the sort that
+ * are available in bash, python and most other interactive command-lines
+ */
+object ReadlineFilters {
+  // www.bigsmoke.us/readline/shortcuts
+  // Ctrl-b    <- one char
+  // Ctrl-f    -> one char
+  // Alt-b     <- one word
+  // Alt-f     -> one word
+  // Ctrl-a    <- start of line
+  // Ctrl-e    -> end of line
+  // Ctrl-x-x  Toggle start/end
+
+  // Backspace <- delete char
+  // Del       -> delete char
+  // Ctrl-u    <- delete all
+  // Ctrl-k    -> delete all
+  // Alt-d     -> delete word
+  // Ctrl-w    <- delete word
+
+  // Ctrl-u/-  Undo
+  // Ctrl-l    clear screen
+
+  // Ctrl-k    -> cut all
+  // Alt-d     -> cut word
+  // Alt-Backspace  <- cut word
+  // Ctrl-y    paste last cut
+
+  /**
+   * Basic readline-style navigation, using all the obscure alphabet hotkeys
+   * rather than using arrows
+   */
+  lazy val navFilter = Filter.merge(
+    Case(Ctrl('b'))((b, c, m) => (b, c - 1)), // <- one char
+    Case(Ctrl('f'))((b, c, m) => (b, c + 1)), // -> one char
+    Case(Alt + "b")((b, c, m) => GUILikeFilters.wordLeft(b, c)), // <- one word
+    Case(Alt + "B")((b, c, m) => GUILikeFilters.wordLeft(b, c)), // <- one word
+    Case(LinuxCtrlLeft)((b, c, m) => GUILikeFilters.wordLeft(b, c)), // <- one word
+    Case(Alt + "f")((b, c, m) => GUILikeFilters.wordRight(b, c)), // -> one word
+    Case(Alt + "F")((b, c, m) => GUILikeFilters.wordRight(b, c)), // -> one word
+    Case(LinuxCtrlRight)((b, c, m) => GUILikeFilters.wordRight(b, c)), // -> one word
+    Case(Home)((b, c, m) => BasicFilters.moveStart(b, c, m.width)), // <- one line
+    Case(HomeScreen)((b, c, m) => BasicFilters.moveStart(b, c, m.width)), // <- one line
+    Case(Ctrl('a'))((b, c, m) => BasicFilters.moveStart(b, c, m.width)),
+    Case(End)((b, c, m) => BasicFilters.moveEnd(b, c, m.width)), // -> one line
+    Case(EndScreen)((b, c, m) => BasicFilters.moveEnd(b, c, m.width)), // -> one line
+    Case(Ctrl('e'))((b, c, m) => BasicFilters.moveEnd(b, c, m.width)),
+    Case(Alt + "t")((b, c, m) => transposeWord(b, c)),
+    Case(Alt + "T")((b, c, m) => transposeWord(b, c)),
+    Case(Ctrl('t'))((b, c, m) => transposeLetter(b, c))
+  )
+
+  def transposeLetter(b: Vector[Char], c: Int) =
+    // If there's no letter before the cursor to transpose, don't do anything
+    if (c == 0) (b, c)
+    else if (c == b.length) (b.dropRight(2) ++ b.takeRight(2).reverse, c) // at end of line: swap the last two chars
+    else (b.patch(c-1, b.slice(c-1, c+1).reverse, 2), c + 1) // swap chars around the cursor and advance
+
+  def transposeWord(b: Vector[Char], c: Int) = {
+    val leftStart0 = GUILikeFilters.consumeWord(b, c - 1, -1, 1)
+    val leftEnd0 = GUILikeFilters.consumeWord(b, leftStart0, 1, 0)
+    val rightEnd = GUILikeFilters.consumeWord(b, c, 1, 0)
+    val rightStart = GUILikeFilters.consumeWord(b, rightEnd - 1, -1, 1)
+
+    // If no word to the left to transpose, do nothing
+    if (leftStart0 == 0 && rightStart == 0) (b, c)
+    else {
+      val (leftStart, leftEnd) =
+        // If there is no word to the *right* to transpose,
+        // transpose the two words to the left instead
+        if (leftEnd0 == b.length && rightEnd == b.length) {
+          val leftStart = GUILikeFilters.consumeWord(b, leftStart0 - 1, -1, 1)
+          val leftEnd = GUILikeFilters.consumeWord(b, leftStart, 1, 0)
+          (leftStart, leftEnd)
+        }else (leftStart0, leftEnd0)
+
+      val newB = // left word and right word swapped, everything else preserved
+        b.slice(0, leftStart) ++
+        b.slice(rightStart, rightEnd) ++
+        b.slice(leftEnd, rightStart) ++
+        b.slice(leftStart, leftEnd) ++
+        b.slice(rightEnd, b.length)
+
+      (newB, rightEnd)
+    }
+  }
+
+  /**
+   * All the cut-pasting logic, though for many people they simply
+   * use these shortcuts for deleting and don't use paste much at all.
+   */
+  case class CutPasteFilter() extends DelegateFilter {
+    def identifier = "CutPasteFilter"
+    var accumulating = false // whether consecutive cuts merge into one kill-ring entry
+    var currentCut = Vector.empty[Char] // the current "kill ring" contents
+    def prepend(b: Vector[Char]) = { // merge a leftward cut in front of the current cut
+      if (accumulating) currentCut = b ++ currentCut
+      else currentCut = b
+      accumulating = true
+    }
+    def append(b: Vector[Char]) = { // merge a rightward cut after the current cut
+      if (accumulating) currentCut = currentCut ++ b
+      else currentCut = b
+      accumulating = true
+    }
+    def cutCharLeft(b: Vector[Char], c: Int) = {
+      /* Do not edit current cut. Zsh(zle) & Bash(readline) do not edit the yank ring for Ctrl-h */
+      (b patch(from = c - 1, patch = Nil, replaced = 1), c - 1)
+    }
+
+    def cutAllLeft(b: Vector[Char], c: Int) = {
+      prepend(b.take(c))
+      (b.drop(c), 0)
+    }
+    def cutAllRight(b: Vector[Char], c: Int) = {
+      append(b.drop(c))
+      (b.take(c), c)
+    }
+
+    def cutWordRight(b: Vector[Char], c: Int) = {
+      val start = GUILikeFilters.consumeWord(b, c, 1, 0) // end index of the word to the right
+      append(b.slice(c, start))
+      (b.take(c) ++ b.drop(start), c)
+    }
+
+    def cutWordLeft(b: Vector[Char], c: Int) = {
+      val start = GUILikeFilters.consumeWord(b, c - 1, -1, 1)
+      prepend(b.slice(start, c))
+      (b.take(start) ++ b.drop(c), start)
+    }
+
+    def paste(b: Vector[Char], c: Int) = {
+      accumulating = false // a paste ends any cut-accumulation run
+      (b.take(c) ++ currentCut ++ b.drop(c), c + currentCut.length)
+    }
+
+    def filter = Filter.merge(
+      Case(Ctrl('u'))((b, c, m) => cutAllLeft(b, c)),
+      Case(Ctrl('k'))((b, c, m) => cutAllRight(b, c)),
+      Case(Alt + "d")((b, c, m) => cutWordRight(b, c)),
+      Case(Ctrl('w'))((b, c, m) => cutWordLeft(b, c)),
+      Case(Alt + "\u007f")((b, c, m) => cutWordLeft(b, c)), // Alt-Backspace
+      // weird hacks to make it run code every time without having to be the one
+      // handling the input; ideally we'd change Filter to be something
+      // other than a PartialFunction, but for now this will do.
+
+      // If some command goes through that's not appending/prepending to the
+      // kill ring, stop appending and allow the next kill to override it
+      Filter.wrap("ReadLineFilterWrap") {_ => accumulating = false; None},
+      Case(Ctrl('h'))((b, c, m) => cutCharLeft(b, c)),
+      Case(Ctrl('y'))((b, c, m) => paste(b, c))
+    )
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala
new file mode 100644
index 000000000..c265a7a4c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala
@@ -0,0 +1,157 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.LazyList.~:
+import terminal._
+import scala.collection.mutable
+
+/**
+ * A filter that implements "undo" functionality in the ammonite REPL. It
+ * shares the same `Ctrl -` hotkey as the bash undo, but shares behavior
+ * with the undo behavior in desktop text editors:
+ *
+ * - Multiple `delete`s in a row get collapsed
+ * - In addition to edits you can undo cursor movements: undo will bring your
+ * cursor back to location of previous edits before it undoes them
+ * - Provides "redo" functionality under `Alt -`/`Esc -`: un-undo the things
+ * you didn't actually want to undo!
+ *
+ * @param maxUndo: the maximum number of undo-frames that are stored.
+ */
+case class UndoFilter(maxUndo: Int = 25) extends DelegateFilter {
+  def identifier = "UndoFilter"
+  /**
+   * The current stack of states that undo/redo would cycle through.
+   *
+   * Not really the appropriate data structure, since when it reaches
+   * `maxUndo` in length we remove one element from the start whenever we
+   * append one element to the end, which costs `O(n)`. On the other hand,
+   * It also costs `O(n)` to maintain the buffer of previous states, and
+   * so `n` is probably going to be pretty small anyway (tens?) so `O(n)`
+   * is perfectly fine.
+   */
+  val undoBuffer = mutable.Buffer[(Vector[Char], Int)](Vector[Char]() -> 0)
+
+  /**
+   * The current position in the undoStack that the terminal is currently in.
+   */
+  var undoIndex = 0
+  /**
+   * An enum representing what the user is "currently" doing. Used to
+   * collapse sequential actions into one undo step: e.g. 10 plain
+   * chars typed becomes 1 undo step, or 10 chars deleted becomes one undo
+   * step, but 4 chars typed followed by 3 chars deleted followed by 3 chars
+   * typed gets grouped into 3 different undo steps
+   */
+  var state = UndoState.Default
+  def currentUndo = undoBuffer(undoBuffer.length - undoIndex - 1) // the frame `undoIndex` steps back from the top
+
+  def undo(b: Vector[Char], c: Int) = {
+    val msg =
+      if (undoIndex >= undoBuffer.length - 1) UndoFilter.cannotUndoMsg // already at the oldest frame
+      else {
+        undoIndex += 1
+        state = UndoState.Default
+        UndoFilter.undoMsg
+      }
+    val (b1, c1) = currentUndo
+    (b1, c1, msg)
+  }
+
+  def redo(b: Vector[Char], c: Int) = {
+    val msg =
+      if (undoIndex <= 0) UndoFilter.cannotRedoMsg // already at the newest frame
+      else {
+        undoIndex -= 1
+        state = UndoState.Default
+        UndoFilter.redoMsg
+      }
+
+    currentUndo
+    val (b1, c1) = currentUndo
+    (b1, c1, msg)
+  }
+
+  def wrap(bc: (Vector[Char], Int, Ansi.Str), rest: LazyList[Int]) = {
+    val (b, c, msg) = bc // pack (buffer, cursor, message) into a TermState
+    TS(rest, b, c, msg)
+  }
+
+  def pushUndos(b: Vector[Char], c: Int) = {
+    val (lastB, lastC) = currentUndo
+    // Since we don't have access to the `typingFilter` in this code, we
+    // instead attempt to reverse-engineer "what happened" to the buffer by
+    // comparing the old one with the new.
+    //
+    // It turns out that it's not that hard to identify the few cases we care
+    // about, since they all result in either 0 or 1 chars being different
+    // between old and new buffers.
+    val newState =
+      // Nothing changed means nothing changed
+      if (lastC == c && lastB == b) state
+      // if cursor advanced 1, and buffer grew by 1 at the cursor, we're typing
+      else if (lastC + 1 == c && lastB == b.patch(c-1, Nil, 1)) UndoState.Typing
+      // cursor moved left 1, and buffer lost 1 char at that point, we're deleting
+      else if (lastC - 1 == c && lastB.patch(c, Nil, 1) == b) UndoState.Deleting
+      // cursor didn't move, and buffer lost 1 char at that point, we're also deleting
+      else if (lastC == c && lastB.patch(c - 1, Nil, 1) == b) UndoState.Deleting
+      // cursor moved around but buffer didn't change, we're navigating
+      else if (lastC != c && lastB == b) UndoState.Navigating
+      // otherwise, sit in the "Default" state where every change is recorded.
+      else UndoState.Default
+
+    if (state != newState || newState == UndoState.Default && (lastB, lastC) != (b, c)) {
+      // If something changes: either we enter a new `UndoState`, or we're in
+      // the `Default` undo state and the terminal buffer/cursor change, then
+      // truncate the `undoStack` and add a new tuple to the stack that we can
+      // build upon. This means that we lose all ability to re-do actions after
+      // someone starts making edits, which is consistent with most other
+      // editors
+      state = newState
+      undoBuffer.remove(undoBuffer.length - undoIndex, undoIndex) // drop any not-yet-redone frames
+      undoIndex = 0
+
+      if (undoBuffer.length == maxUndo) undoBuffer.remove(0) // cap the stack size; O(n) but n is small
+
+      undoBuffer.append(b -> c)
+    } else if (undoIndex == 0 && (b, c) != undoBuffer(undoBuffer.length - 1)) {
+      undoBuffer(undoBuffer.length - 1) = (b, c) // same action kind: collapse into the top frame
+    }
+
+    state = newState
+  }
+
+  def filter = Filter.merge(
+    Filter.wrap("undoFilterWrapped") {
+      case TS(q ~: rest, b, c, _) => // fires on every keypress purely to record undo frames
+        pushUndos(b, c)
+        None
+    },
+    Filter("undoFilter") {
+      case TS(31 ~: rest, b, c, _) => wrap(undo(b, c), rest) // 31 == Ctrl--
+      case TS(27 ~: 114 ~: rest, b, c, _) => wrap(undo(b, c), rest) // Esc r
+      case TS(27 ~: 45 ~: rest, b, c, _) => wrap(redo(b, c), rest) // Esc -
+    }
+  )
+}
+
+
+sealed class UndoState(override val toString: String) // type-safe enum of what the user is "currently" doing
+object UndoState {
+  val Default = new UndoState("Default") // every buffer/cursor change gets its own undo frame
+  val Typing = new UndoState("Typing")
+  val Deleting = new UndoState("Deleting")
+  val Navigating = new UndoState("Navigating")
+}
+
+object UndoFilter { // status messages shown next to the prompt after undo/redo keystrokes
+  val undoMsg = Ansi.Color.Blue(" ...undoing last action, `Alt -` or `Esc -` to redo")
+  val cannotUndoMsg = Ansi.Color.Blue(" ...no more actions to undo")
+  val redoMsg = Ansi.Color.Blue(" ...redoing last action")
+  val cannotRedoMsg = Ansi.Color.Blue(" ...no more actions to redo")
+}
diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
new file mode 100644
index 000000000..95f468995
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
@@ -0,0 +1,63 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.Contexts._
+import java.io.{ BufferedReader, PrintWriter }
+import diagnostic.{ Message, MessageContainer }
+import diagnostic.messages.{ Error, Warning, ConditionalWarning }
+
/**
 * This class implements a Reporter that displays messages on a text console.
 *
 * @param reader source for interactive responses to `-Xprompt`; may be null,
 *               in which case the prompt is printed but never read
 * @param writer sink for all diagnostic output, flushed after every message
 */
class ConsoleReporter(
    reader: BufferedReader = Console.in,
    writer: PrintWriter = new PrintWriter(Console.err, true)
) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering {

  import MessageContainer._

  /** maximal number of error messages to be printed */
  // NOTE(review): not referenced in this class — presumably consumed by a
  // subclass or planned enforcement; confirm before removing.
  protected def ErrorLimit = 100

  /** Prints the message followed by a newline and flushes, so output is
   *  visible immediately.
   */
  def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() }

  /** Prints the message with the given position indication, then its
   *  explanation (with `-explain`) or a hint that one is available.
   */
  def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
    val didPrint = m match {
      case m: Error =>
        printMessage(messageAndPos(m.contained, m.pos, diagnosticLevel(m)))
        if (ctx.settings.prompt.value) displayPrompt()
        true
      case m: ConditionalWarning if !m.enablingOption.value =>
        // Warning whose enabling option (e.g. -deprecation) is off: suppress.
        false
      case m =>
        printMessage(messageAndPos(m.contained, m.pos, diagnosticLevel(m)))
        true
    }

    if (didPrint && ctx.shouldExplain(m))
      printMessage(explanation(m.contained))
    else if (didPrint && m.contained.explanation.nonEmpty)
      printMessage("\nlonger explanation available when compiling with `-explain`")
  }

  /** Show prompt if `-Xprompt` is passed as a flag to the compiler */
  def displayPrompt()(implicit ctx: Context): Unit = {
    printMessage("\na)bort, s)tack, r)esume: ")
    flush()
    if (reader != null) {
      val response = reader.read().asInstanceOf[Char].toLower
      if (response == 'a' || response == 's') {
        Thread.dumpStack()
        if (response == 'a')
          sys.exit(1)
      }
      // Fix: was `print("\n")`, which wrote to stdout while every other
      // message goes through `writer` (stderr by default). Route the blank
      // line through the reporter's own writer for consistency.
      printMessage("")
      flush()
    }
  }

  override def flush()(implicit ctx: Context): Unit = { writer.flush() }
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
new file mode 100644
index 000000000..ba1ab9b33
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
@@ -0,0 +1,21 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.Contexts.Context
+import diagnostic.MessageContainer
+
/** Mixes an `isHidden` refinement into a `Reporter` so that non-sensical
 *  diagnostics are suppressed once a real error has already been reported.
 */
trait HideNonSensicalMessages extends Reporter {
  /** A non-sensical message is hidden unless no error has been reported yet
   *  (it may be the only clue we have) or `-Yshow-suppressed-errors` is set.
   */
  override def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean = {
    if (super.isHidden(m)) true
    else if (!m.isNonSensical) false
    else hasErrors && !ctx.settings.YshowSuppressedErrors.value
  }
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
new file mode 100644
index 000000000..24d583b19
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
@@ -0,0 +1,145 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.Contexts.Context
+import core.Decorators._
+import printing.Highlighting.{Blue, Red}
+import diagnostic.{Message, MessageContainer, NoExplanation}
+import diagnostic.messages._
+import util.SourcePosition
+
+import scala.collection.mutable
+
/** Shared rendering logic for diagnostics: source excerpts, the column
 *  marker under the error, the aligned message text, and the header line
 *  separating consecutive diagnostics.
 */
trait MessageRendering {
  /** Remove ANSI coloring from `str`, useful for getting real length of
   *  strings
   *
   *  @return string stripped of ANSI escape codes
   */
  def stripColor(str: String): String =
    str.replaceAll("\u001B\\[[;\\d]*m", "")

  /** When inlining a method call, if there's an error we'd like to get the
   *  outer context and the `pos` at which the call was inlined.
   *
   *  @return a list of strings with inline locations
   */
  def outer(pos: SourcePosition, prefix: String)(implicit ctx: Context): List[String] =
    if (pos.outer.exists) {
      s"$prefix| This location is in code that was inlined at ${pos.outer}" ::
      outer(pos.outer, prefix)
    } else Nil

  /** Get the sourcelines before and after the position, as well as the offset
   *  for rendering line numbers
   *
   *  @return (lines before error, lines after error, line numbers offset)
   */
  def sourceLines(pos: SourcePosition)(implicit ctx: Context): (List[String], List[String], Int) = {
    // Width of the widest "<lineNumber> |" prefix seen so far; used to
    // right-align all prefixes. NOTE(review): updated as each batch renders,
    // so lines in the first batch are padded with the width known at that
    // point — confirm this is the intended alignment.
    var maxLen = Int.MinValue
    def render(xs: List[Int]) =
      xs.map(pos.source.offsetToLine(_))
        .map { lineNbr =>
          val prefix = s"${lineNbr + 1} |"
          maxLen = math.max(maxLen, prefix.length)
          (prefix, pos.lineContent(lineNbr).stripLineEnd)
        }
        .map { case (prefix, line) =>
          val lnum = Red(" " * math.max(0, maxLen - prefix.length) + prefix)
          hl"$lnum$line"
        }

    val (before, after) = pos.beforeAndAfterPoint
    (render(before), render(after), maxLen)
  }

  /** The column markers aligned under the error */
  def columnMarker(pos: SourcePosition, offset: Int)(implicit ctx: Context): String = {
    val prefix = " " * (offset - 1)
    val whitespace = " " * pos.startColumn
    // One caret per column when the error fits on one line, otherwise one.
    val carets = Red {
      if (pos.startLine == pos.endLine)
        "^" * math.max(1, pos.endColumn - pos.startColumn)
      else "^"
    }

    s"$prefix|$whitespace${carets.show}"
  }

  /** The error message (`msg`) aligned under `pos`
   *
   *  @return aligned error message
   */
  def errorMsg(pos: SourcePosition, msg: String, offset: Int)(implicit ctx: Context): String = {
    // Smallest padding that keeps every message line within the page width
    // while lining up with the error column where possible.
    val leastWhitespace = msg.lines.foldLeft(Int.MaxValue) { (minPad, line) =>
      val lineLength = stripColor(line).length
      val currPad = math.min(
        math.max(0, ctx.settings.pageWidth.value - offset - lineLength),
        offset + pos.startColumn
      )

      math.min(currPad, minPad)
    }

    msg.lines
      .map { line => " " * (offset - 1) + "|" + (" " * (leastWhitespace - offset)) + line}
      .mkString(sys.props("line.separator"))
  }

  /** The separator between errors containing the source file and error type
   *
   *  @return separator containing error location and kind
   */
  def posStr(pos: SourcePosition, diagnosticLevel: String, message: Message)(implicit ctx: Context): String =
    if (pos.exists) Blue({
      val file = pos.source.file.toString
      // Zero-pad the error id to three digits, e.g. "[E004] ".
      val errId =
        if (message.errorId != NoExplanation.ID)
          s"[E${"0" * (3 - message.errorId.toString.length) + message.errorId}] "
        else ""
      val kind =
        if (message.kind == "") diagnosticLevel
        else s"${message.kind} $diagnosticLevel"
      val prefix = s"-- ${errId}${kind}: $file "

      // Fill the rest of the line with dashes up to the page width.
      prefix +
      ("-" * math.max(ctx.settings.pageWidth.value - stripColor(prefix).length, 0))
    }).show else ""

  /** Explanation rendered under "Explanation" header */
  def explanation(m: Message)(implicit ctx: Context): String = {
    val sb = new StringBuilder(
      hl"""|
           |${Blue("Explanation")}
           |${Blue("===========")}"""
    )
    sb.append('\n').append(m.explanation)
    // Guarantee a trailing newline after the explanation text.
    if (m.explanation.lastOption != Some('\n')) sb.append('\n')
    sb.toString
  }

  /** The whole message rendered from `msg`: header line, then (if a position
   *  exists) source context, column marker, aligned message and inline
   *  locations; otherwise just the message text.
   */
  def messageAndPos(msg: Message, pos: SourcePosition, diagnosticLevel: String)(implicit ctx: Context): String = {
    val sb = mutable.StringBuilder.newBuilder
    sb.append(posStr(pos, diagnosticLevel, msg)).append('\n')
    if (pos.exists) {
      val (srcBefore, srcAfter, offset) = sourceLines(pos)
      val marker = columnMarker(pos, offset)
      val err = errorMsg(pos, msg.msg, offset)
      sb.append((srcBefore ::: marker :: err :: outer(pos, " " * (offset - 1)) ::: srcAfter).mkString("\n"))
    } else sb.append(msg.msg)
    sb.toString
  }

  /** Human-readable severity label for the container, most specific first. */
  def diagnosticLevel(cont: MessageContainer): String =
    cont match {
      case m: Error => "Error"
      case m: FeatureWarning => "Feature Warning"
      case m: DeprecationWarning => "Deprecation Warning"
      case m: UncheckedWarning => "Unchecked Warning"
      case m: MigrationWarning => "Migration Warning"
      case m: Warning => "Warning"
      case m: Info => "Info"
    }
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
new file mode 100644
index 000000000..8477cfe28
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -0,0 +1,296 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.Contexts._
+import util.{SourcePosition, NoSourcePosition}
+import core.Decorators.PhaseListDecorator
+import collection.mutable
+import config.Printers
+import java.lang.System.currentTimeMillis
+import core.Mode
+import dotty.tools.dotc.core.Symbols.Symbol
+import diagnostic.messages._
+import diagnostic._
+import Message._
+
object Reporter {
  /** Convert a SimpleReporter into a real Reporter */
  def fromSimpleReporter(simple: interfaces.SimpleReporter): Reporter =
    new Reporter with UniqueMessagePositions with HideNonSensicalMessages {
      override def doReport(m: MessageContainer)(implicit ctx: Context): Unit = m match {
        case m: ConditionalWarning if !m.enablingOption.value =>
          // conditional warning whose enabling option is off: drop silently
        case _ =>
          simple.report(m)
      }
    }
}
+
+import Reporter._
+
/** Convenience methods for emitting diagnostics and trace output from a
 *  `Context`; everything is routed through the context's `reporter`.
 */
trait Reporting { this: Context =>

  /** For sending messages that are printed only if -verbose is set */
  def inform(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
    if (this.settings.verbose.value) this.echo(msg, pos)

  /** Report `msg` unconditionally as an `Info`. */
  def echo(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new Info(msg, pos))

  def deprecationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new DeprecationWarning(msg, pos))

  def migrationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new MigrationWarning(msg, pos))

  def uncheckedWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new UncheckedWarning(msg, pos))

  def featureWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new FeatureWarning(msg, pos))

  /** Report use of a language feature that needs enabling via an import or
   *  a `-language:` option. The detailed explanation is emitted only the
   *  first time a given `featureUseSite` is reported.
   */
  def featureWarning(feature: String, featureDescription: String, isScala2Feature: Boolean,
      featureUseSite: Symbol, required: Boolean, pos: SourcePosition): Unit = {
    val req = if (required) "needs to" else "should"
    val prefix = if (isScala2Feature) "scala." else "dotty."
    val fqname = prefix + "language." + feature

    val explain = {
      if (reporter.isReportedFeatureUseSite(featureUseSite)) ""
      else {
        reporter.reportNewFeatureUseSite(featureUseSite)
        s"""
           |This can be achieved by adding the import clause 'import $fqname'
           |or by setting the compiler option -language:$feature.
           |See the Scala docs for value $fqname for a discussion
           |why the feature $req be explicitly enabled."""
      }
    }

    val msg = s"$featureDescription $req be enabled\nby making the implicit value $fqname visible.$explain"
    if (required) error(msg, pos)
    else reporter.report(new FeatureWarning(msg, pos))
  }

  def warning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new Warning(msg, pos))

  /** A warning that becomes a hard error under `-strict`. */
  def strictWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    if (this.settings.strict.value) error(msg, pos)
    else reporter.report {
      new ExtendMessage(() => msg)(_ + "\n(This would be an error under strict mode)").warning(pos)
    }

  def error(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report(new Error(msg, pos))

  /** In Scala-2 compatibility mode this is only a migration warning;
   *  otherwise it is a real error.
   */
  def errorOrMigrationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    if (ctx.scala2Mode) migrationWarning(msg, pos) else error(msg, pos)

  def restrictionError(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
    reporter.report {
      new ExtendMessage(() => msg)(m => s"Implementation restriction: $m").error(pos)
    }

  /** Route an error for incomplete input to the reporter's installed
   *  incomplete-diagnostic handler.
   */
  def incompleteInputError(msg: => Message, pos: SourcePosition = NoSourcePosition)(implicit ctx: Context): Unit =
    reporter.incomplete(new Error(msg, pos))(ctx)

  /** Log msg if settings.log contains the current phase.
   *  See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of
   *  "contains" here.
   */
  def log(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
    if (this.settings.log.value.containsPhase(phase))
      echo(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg", pos)

  def debuglog(msg: => String): Unit =
    if (ctx.debug) log(msg)

  /** `informProgress` with the elapsed time since `start` appended. */
  def informTime(msg: => String, start: Long): Unit = {
    def elapsed = s" in ${currentTimeMillis - start}ms"
    informProgress(msg + elapsed)
  }

  def informProgress(msg: => String) =
    inform("[" + msg + "]")

  /** Log `msg` followed by `value`, then return `value` unchanged. */
  def trace[T](msg: => String)(value: T) = {
    log(msg + " " + value)
    value
  }

  def debugwarn(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
    if (this.settings.debug.value) warning(msg, pos)

  /** `traceIndented` gated by the debug-trace setting. */
  @inline
  def debugTraceIndented[TD](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => TD): TD =
    conditionalTraceIndented(this.settings.debugTrace.value, question, printer, show)(op)

  @inline
  def conditionalTraceIndented[TC](cond: Boolean, question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => TC): TC =
    if (cond) traceIndented[TC](question, printer, show)(op)
    else op

  /** Evaluate `op`, logging `question` and the result at matching
   *  indentation, unless `printer` is disabled.
   */
  @inline
  def traceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T =
    if (printer eq config.Printers.noPrinter) op
    else doTraceIndented[T](question, printer, show)(op)

  private def doTraceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T = {
    def resStr(res: Any): String = res match {
      case res: printing.Showable if show => res.show
      case _ => String.valueOf(res)
    }
    // Avoid evaluating question multiple time, since each evaluation
    // may cause some extra logging output.
    lazy val q: String = question
    doTraceIndented[T](s"==> $q?", (res: Any) => s"<== $q = ${resStr(res)}")(op)
  }

  def doTraceIndented[T](leading: => String, trailing: Any => String)(op: => T): T =
    if (ctx.mode.is(Mode.Printing)) op
    else {
      var finalized = false
      var logctx = this
      // Walk out of StoreReporters: they buffer messages, so trace output
      // logged there would never actually be printed.
      while (logctx.reporter.isInstanceOf[StoreReporter]) logctx = logctx.outer
      def finalize(result: Any, note: String) =
        if (!finalized) {
          base.indent -= 1
          logctx.log(s"${base.indentTab * base.indent}${trailing(result)}$note")
          finalized = true
        }
      try {
        logctx.log(s"${base.indentTab * base.indent}$leading")
        base.indent += 1
        val res = op
        finalize(res, "")
        res
      } catch {
        // Still close the trace bracket (with the exception noted) before
        // letting the exception propagate.
        case ex: Throwable =>
          finalize("<missing>", s" (with exception $ex)")
          throw ex
      }
    }

  /** Implements a fold that applies the function `f` to the result of `op` if
   *  there are no new errors in the reporter
   *
   *  @param op operation checked for errors
   *  @param f function applied to result of op
   *  @return either the result of `op` if it had errors or the result of `f`
   *          applied to it
   */
  def withNoError[A, B >: A](op: => A)(f: A => B): B = {
    val before = reporter.errorCount
    val op0 = op

    if (reporter.errorCount > before) op0
    else f(op0)
  }
}
+
/**
 * This interface provides methods to issue information, warning and
 * error messages, and keeps running counts of what has been reported.
 */
abstract class Reporter extends interfaces.ReporterResult {

  /** Report a diagnostic */
  def doReport(m: MessageContainer)(implicit ctx: Context): Unit

  /** Whether very long lines can be truncated. This exists so important
   * debugging information (like printing the classpath) is not rendered
   * invisible due to the max message length.
   */
  private var _truncationOK: Boolean = true
  def truncationOK = _truncationOK
  /** Evaluate `body` with truncation disabled, restoring the flag after. */
  def withoutTruncating[T](body: => T): T = {
    val saved = _truncationOK
    _truncationOK = false
    try body
    finally _truncationOK = saved
  }

  /** Handler invoked for diagnostics passed to `incomplete`. */
  type ErrorHandler = MessageContainer => Context => Unit
  // By default, incomplete diagnostics are reported like any other.
  private var incompleteHandler: ErrorHandler = d => c => report(d)(c)
  /** Evaluate `op` with `handler` installed as the incomplete handler. */
  def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = {
    val saved = incompleteHandler
    incompleteHandler = handler
    try op
    finally incompleteHandler = saved
  }

  var errorCount = 0
  var warningCount = 0
  def hasErrors = errorCount > 0
  def hasWarnings = warningCount > 0
  // All reported errors, most recently reported first.
  private var errors: List[Error] = Nil
  def allErrors = errors

  /** Have errors been reported by this reporter, or in the
   *  case where this is a StoreReporter, by an outer reporter?
   */
  def errorsReported = hasErrors

  private[this] var reportedFeaturesUseSites = Set[Symbol]()
  def isReportedFeatureUseSite(featureTrait: Symbol): Boolean = reportedFeaturesUseSites.contains(featureTrait)
  def reportNewFeatureUseSite(featureTrait: Symbol): Unit = reportedFeaturesUseSites += featureTrait

  // Count of conditional warnings suppressed because their option was off,
  // keyed by the name of the enabling setting (reported in `summary`).
  val unreportedWarnings = new mutable.HashMap[String, Int] {
    override def default(key: String) = 0
  }

  /** Print `m` (unless hidden) and update the counters. */
  def report(m: MessageContainer)(implicit ctx: Context): Unit =
    if (!isHidden(m)) {
      // Render under Mode.Printing so diagnostics raised while printing are
      // themselves hidden (see `isHidden`).
      doReport(m)(ctx.addMode(Mode.Printing))
      m match {
        case m: ConditionalWarning if !m.enablingOption.value => unreportedWarnings(m.enablingOption.name) += 1
        case m: Warning => warningCount += 1
        case m: Error =>
          errors = m :: errors
          errorCount += 1
        case m: Info => // nothing to do here
        // match error if d is something else
      }
    }

  /** Delegate `m` to the currently installed incomplete-diagnostic handler. */
  def incomplete(m: MessageContainer)(implicit ctx: Context): Unit =
    incompleteHandler(m)(ctx)

  /** Summary of warnings and errors */
  def summary: String = {
    val b = new mutable.ListBuffer[String]
    if (warningCount > 0)
      b += countString(warningCount, "warning") + " found"
    if (errorCount > 0)
      b += countString(errorCount, "error") + " found"
    for ((settingName, count) <- unreportedWarnings)
      b += s"there were $count ${settingName.tail} warning(s); re-run with $settingName for details"
    b.mkString("\n")
  }

  /** Print the summary of warnings and errors */
  def printSummary(implicit ctx: Context): Unit = {
    val s = summary
    if (s != "") ctx.echo(s)
  }

  /** Returns a string meaning "n elements". */
  protected def countString(n: Int, elements: String): String = n match {
    case 0 => "no " + elements + "s"
    case 1 => "one " + elements
    case 2 => "two " + elements + "s"
    case 3 => "three " + elements + "s"
    case 4 => "four " + elements + "s"
    case _ => n + " " + elements + "s"
  }

  /** Should this diagnostic not be reported at all? */
  def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean = ctx.mode.is(Mode.Printing)

  /** Does this reporter contain not yet reported errors or warnings? */
  def hasPending: Boolean = false

  /** Issue all error messages in this reporter to next outer one, or make sure they are written. */
  def flush()(implicit ctx: Context): Unit = {}
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
new file mode 100644
index 000000000..586273c2e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
@@ -0,0 +1,46 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.Contexts.Context
+import collection.mutable
+import config.Printers.typr
+import diagnostic.MessageContainer
+import diagnostic.messages._
+
/** A `Reporter` that buffers every incoming message instead of printing it;
 *  `flush` re-reports the buffered messages to the context's reporter.
 *
 *  Beware: storing containers can both leak memory and force lazy messages.
 *  Enabling `config.Printers.typr` (instead of `noPrinter`) forces each
 *  message for the debug trace, and an unflushed reporter retains containers
 *  that capture a `Context` (roughly 4MB each).
 */
class StoreReporter(outer: Reporter) extends Reporter {

  // Allocated lazily so an unused StoreReporter costs nothing.
  private var infos: mutable.ListBuffer[MessageContainer] = null

  def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
    typr.println(s">>>> StoredError: ${m.message}") // !!! DEBUG
    if (infos == null) infos = mutable.ListBuffer.empty
    infos += m
  }

  /** True when the buffer holds at least one error or warning. */
  override def hasPending: Boolean =
    infos != null && infos.exists {
      case _: Error | _: Warning => true
      case _ => false
    }

  /** Re-report everything buffered to the current context's reporter, then
   *  release the buffer.
   */
  override def flush()(implicit ctx: Context) =
    if (infos != null) {
      infos.foreach(ctx.reporter.report(_))
      infos = null
    }

  /** Errors count whether seen here or by the enclosing reporter. */
  override def errorsReported = hasErrors || outer.errorsReported
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
new file mode 100644
index 000000000..d8e03ab66
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
@@ -0,0 +1,20 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.Contexts.Context
+import collection.mutable
+import diagnostic.MessageContainer
+import diagnostic.messages.Error
+import Reporter._
+
/** A `Reporter` that turns every error into a thrown exception while
 *  forwarding warnings and other diagnostics to `reportInfo`.
 */
class ThrowingReporter(reportInfo: Reporter) extends Reporter {
  def doReport(m: MessageContainer)(implicit ctx: Context): Unit =
    m match {
      // MessageContainer extends Exception, so an Error can be thrown as-is.
      case err: Error => throw err
      case other      => reportInfo.doReport(other)
    }
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
new file mode 100644
index 000000000..6fd971c2a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
@@ -0,0 +1,32 @@
+package dotty.tools
+package dotc
+package reporting
+
+import scala.collection.mutable
+import util.{SourcePosition, SourceFile}
+import core.Contexts.Context
+import diagnostic.MessageContainer
+
/** This trait implements `isHidden` so that multiple messages per position
 *  are suppressed, unless they are of increasing severity. */
trait UniqueMessagePositions extends Reporter {

  // Highest level reported so far for each (file, offset) pair.
  private val positions = new mutable.HashMap[(SourceFile, Int), Int]

  /** Logs a position and returns true if it was already logged.
   *  @note Two positions are considered identical for logging if they have the same point.
   */
  override def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean =
    super.isHidden(m) || {
      m.pos.exists && {
        var shouldHide = false
        // Record every offset spanned by the position so that any later
        // message overlapping this span at the same or lower level is hidden.
        for (pos <- m.pos.start to m.pos.end) {
          // NOTE(review): keys use `ctx.source`, but the offsets come from
          // `m.pos` — confirm the two sources always agree for reported
          // messages, otherwise `m.pos.source` would be the correct key.
          positions get (ctx.source, pos) match {
            case Some(level) if level >= m.level => shouldHide = true
            case _ => positions((ctx.source, pos)) = m.level
          }
        }
        shouldHide
      }
    }
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/diagnostic/Message.scala b/compiler/src/dotty/tools/dotc/reporting/diagnostic/Message.scala
new file mode 100644
index 000000000..2497fb216
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/diagnostic/Message.scala
@@ -0,0 +1,133 @@
+package dotty.tools
+package dotc
+package reporting
+package diagnostic
+
+import util.SourcePosition
+import core.Contexts.Context
+
+import messages._
+
object Message {
  /** This implicit conversion provides a fallback for error messages that have
   *  not yet been ported to the new scheme (they arrive as plain strings).
   *  Comment out this `implicit def` to see where old errors still exist.
   */
  implicit def toNoExplanation(str: String): Message =
    new NoExplanation(str)
}
+
/** A `Message` contains all semantic information necessary to easily
 *  comprehend what caused the message to be logged. Each message can be turned
 *  into a `MessageContainer` which contains the log level and can later be
 *  consumed by a subclass of `Reporter`. However, the error position is only
 *  part of `MessageContainer`, not `Message`.
 *
 *  NOTE: you should not be persisting messages. Most messages take an implicit
 *  `Context` and these contexts weigh in at about 4mb per instance, as such
 *  persisting these will result in a memory leak.
 *
 *  Instead use the `persist` method to create an instance that does not keep a
 *  reference to these contexts.
 *
 *  @param errorId a unique number identifying the message, this will later be
 *                 used to reference documentation online
 */
abstract class Message(val errorId: Int) { self =>
  import messages._

  /** The `msg` contains the diagnostic message e.g:
   *
   *  > expected: String
   *  > found:    Int
   *
   *  This message will be placed underneath the position given by the enclosing
   *  `MessageContainer`
   */
  def msg: String

  /** The kind of the error message is something like "Syntax" or "Type
   *  Mismatch"
   */
  def kind: String

  /** The explanation should provide a detailed description of why the error
   *  occurred and use examples from the user's own code to illustrate how to
   *  avoid these errors.
   */
  def explanation: String

  /** The implicit `Context` in messages is a large thing that we don't want
   *  persisted. This method gets around that by duplicating the message
   *  without the implicit context being passed along.
   */
  def persist: Message = new Message (errorId) {
    // Copy the rendered fields eagerly instead of deferring to `self`.
    val msg = self.msg
    val kind = self.kind
    val explanation = self.explanation
  }
}
+
/** An extended message keeps the contained message from being evaluated, while
 *  allowing for extension for the `msg` string
 *
 *  This is useful when we need to add additional information to an existing
 *  message.
 */
class ExtendMessage(_msg: () => Message)(f: String => String) { self =>
  // `lazy` so the wrapped message is not forced until actually rendered.
  lazy val msg = f(_msg().msg)
  lazy val kind = _msg().kind
  lazy val explanation = _msg().explanation
  lazy val errorId = _msg().errorId

  /** Repackage the (extended) fields as a plain `Message`. */
  private def toMessage = new Message(errorId) {
    val msg = self.msg
    val kind = self.kind
    val explanation = self.explanation
  }

  /** Enclose this message in an `Error` container */
  def error(pos: SourcePosition) =
    new Error(toMessage, pos)

  /** Enclose this message in a `Warning` container */
  def warning(pos: SourcePosition) =
    new Warning(toMessage, pos)

  /** Enclose this message in an `Info` container */
  def info(pos: SourcePosition) =
    new Info(toMessage, pos)

  /** Enclose this message in a `FeatureWarning` container */
  def featureWarning(pos: SourcePosition) =
    new FeatureWarning(toMessage, pos)

  /** Enclose this message in an `UncheckedWarning` container */
  def uncheckedWarning(pos: SourcePosition) =
    new UncheckedWarning(toMessage, pos)

  /** Enclose this message in a `DeprecationWarning` container */
  def deprecationWarning(pos: SourcePosition) =
    new DeprecationWarning(toMessage, pos)

  /** Enclose this message in a `MigrationWarning` container */
  def migrationWarning(pos: SourcePosition) =
    new MigrationWarning(toMessage, pos)
}
+
/** The fallback `Message`: carries only the raw text, with no explanation
 *  and no `kind`.
 */
class NoExplanation(val msg: String) extends Message(NoExplanation.ID) {
  val kind = ""
  val explanation = ""
}

/** Extractor matching any `Message` whose explanation is empty, i.e. one
 *  that still lacks a proper explanation.
 */
object NoExplanation {
  final val ID = -1

  def unapply(m: Message): Option[Message] =
    Some(m).filter(_.explanation == "")
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala b/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
new file mode 100644
index 000000000..7fd50bfdc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
@@ -0,0 +1,74 @@
+package dotty.tools
+package dotc
+package reporting
+package diagnostic
+
+import util.SourcePosition
+import core.Contexts.Context
+
+import java.util.Optional
+
object MessageContainer {
  // Sentinel tags wrapped around message fragments that reference internally
  // generated (erroneous) types; see `MessageContainer#isNonSensical`.
  val nonSensicalStartTag = "<nonsensical>"
  val nonSensicalEndTag = "</nonsensical>"

  /** Lets a `Context` decide whether a container's explanation should be
   *  printed: requires a non-empty explanation and the `-explain` setting.
   */
  implicit class MessageContext(val c: Context) extends AnyVal {
    def shouldExplain(cont: MessageContainer): Boolean = {
      implicit val ctx: Context = c
      cont.contained.explanation match {
        case "" => false
        case _ => ctx.settings.explain.value
      }
    }
  }
}
+
/** Pairs a lazily evaluated `Message` with a source position and severity
 *  `level`, caching the rendered text. Extends `Exception` so a container
 *  can be thrown directly (see `ThrowingReporter`).
 */
class MessageContainer(
  msgFn: => Message,
  val pos: SourcePosition,
  val level: Int
) extends Exception with interfaces.Diagnostic {
  import MessageContainer._
  private var myMsg: String = null              // cached plain-text message
  private var myIsNonSensical: Boolean = false  // set as a side effect of `message`
  private var myContained: Message = null       // cached result of `msgFn`

  override def position: Optional[interfaces.SourcePosition] =
    if (pos.exists && pos.source.exists) Optional.of(pos) else Optional.empty()

  /** The message to report */
  def message: String = {
    if (myMsg == null) {
      // Strip ANSI escape codes from the rendered message.
      myMsg = contained.msg.replaceAll("\u001B\\[[;\\d]*m", "")
      if (myMsg.contains(nonSensicalStartTag)) {
        myIsNonSensical = true
        // myMsg might be composed of several d"..." invocations -> nested
        // nonsensical tags possible
        myMsg =
          myMsg
            .replaceAllLiterally(nonSensicalStartTag, "")
            .replaceAllLiterally(nonSensicalEndTag, "")
      }
    }
    myMsg
  }

  /** The underlying `Message`, evaluated at most once. */
  def contained: Message = {
    if (myContained == null)
      myContained = msgFn

    myContained
  }

  /** A message is non-sensical if it contains references to <nonsensical>
   *  tags. Such tags are inserted by the error diagnostic framework if a
   *  message contains references to internally generated error types. Normally
   *  we want to suppress error messages referring to types like this because
   *  they look weird and are normally follow-up errors to something that was
   *  diagnosed before.
   */
  def isNonSensical = { message; myIsNonSensical }

  override def toString = s"$getClass at $pos: ${message}"
  override def getMessage() = message
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala b/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
new file mode 100644
index 000000000..489165e56
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
@@ -0,0 +1,902 @@
+package dotty.tools
+package dotc
+package reporting
+package diagnostic
+
+import dotc.core._
+import Contexts.Context, Decorators._, Symbols._, Names._, NameOps._, Types._
+import util.{SourceFile, NoSource}
+import util.{SourcePosition, NoSourcePosition}
+import config.Settings.Setting
+import interfaces.Diagnostic.{ERROR, WARNING, INFO}
+import printing.Highlighting._
+import printing.Formatting
+
+object messages {
+
+ // `MessageContainer`s to be consumed by `Reporter` ---------------------- //
+ class Error(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends MessageContainer(msgFn, pos, ERROR)
+
+ class Warning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends MessageContainer(msgFn, pos, WARNING)
+
+ class Info(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends MessageContainer(msgFn, pos, INFO)
+
+ abstract class ConditionalWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends Warning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context): Setting[Boolean]
+ }
+
+ class FeatureWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.feature
+ }
+
+ class UncheckedWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.unchecked
+ }
+
+ class DeprecationWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.deprecation
+ }
+
+ class MigrationWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.migration
+ }
+
+ /** Messages
+ * ========
+   * The role of messages is to provide the necessary details for a
+   * simple-to-understand diagnostic event. Each message can be turned into a
+   * message container (one of the above) by calling the appropriate method on it.
+ * For instance:
+ *
+ * ```scala
+ * EmptyCatchBlock(tree).error(pos) // res: Error
+ * EmptyCatchBlock(tree).warning(pos) // res: Warning
+ * ```
+ */
+ import ast.Trees._
+ import ast.untpd
+ import ast.tpd
+
+ /** Helper methods for messages */
+ def implicitClassRestrictionsText(implicit ctx: Context) =
+ hl"""|${NoColor("For a full list of restrictions on implicit classes visit")}
+ |${Blue("http://docs.scala-lang.org/overviews/core/implicit-classes.html")}"""
+
+
+ // Syntax Errors ---------------------------------------------------------- //
+ abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: Int)(implicit ctx: Context)
+ extends Message(errNo) {
+ val explanation = {
+ val tryString = tryBody match {
+ case Block(Nil, untpd.EmptyTree) => "{}"
+ case _ => tryBody.show
+ }
+
+ val code1 =
+ s"""|import scala.util.control.NonFatal
+ |
+ |try $tryString catch {
+ | case NonFatal(e) => ???
+ |}""".stripMargin
+
+ val code2 =
+ s"""|try $tryString finally {
+ | // perform your cleanup here!
+ |}""".stripMargin
+
+ hl"""|A ${"try"} expression should be followed by some mechanism to handle any exceptions
+ |thrown. Typically a ${"catch"} expression follows the ${"try"} and pattern matches
+ |on any expected exceptions. For example:
+ |
+ |$code1
+ |
+ |It is also possible to follow a ${"try"} immediately by a ${"finally"} - letting the
+ |exception propagate - but still allowing for some clean up in ${"finally"}:
+ |
+ |$code2
+ |
+ |It is recommended to use the ${"NonFatal"} extractor to catch all exceptions as it
+ |correctly handles transfer functions like ${"return"}."""
+ }
+ }
+
+ case class EmptyCatchBlock(tryBody: untpd.Tree)(implicit ctx: Context)
+ extends EmptyCatchOrFinallyBlock(tryBody, 1) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|The ${"catch"} block does not contain a valid expression, try
+ |adding a case like - `${"case e: Exception =>"}` to the block"""
+ }
+
+ case class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(implicit ctx: Context)
+ extends EmptyCatchOrFinallyBlock(tryBody, 2) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|A ${"try"} without ${"catch"} or ${"finally"} is equivalent to putting
+ |its body in a block; no exceptions are handled."""
+ }
+
+ case class DeprecatedWithOperator()(implicit ctx: Context)
+ extends Message(3) {
+ val kind = "Syntax"
+ val msg =
+ hl"""${"with"} as a type operator has been deprecated; use `&' instead"""
+ val explanation =
+ hl"""|Dotty introduces intersection types - `&' types. These replace the
+ |use of the ${"with"} keyword. There are a few differences in
+ |semantics between intersection types and using `${"with"}'."""
+ }
+
+ case class CaseClassMissingParamList(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(4) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|A ${"case class"} must have at least one parameter list"""
+
+ val explanation =
+ hl"""|${cdef.name} must have at least one parameter list, if you would rather
+ |have a singleton representation of ${cdef.name}, use a "${"case object"}".
+ |Or, add an explicit `()' as a parameter list to ${cdef.name}."""
+ }
+
+
+ // Type Errors ------------------------------------------------------------ //
+ case class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(implicit ctx: Context)
+ extends Message(5) {
+ val kind = "Naming"
+ val msg = em"duplicate pattern variable: `${bind.name}`"
+
+ val explanation = {
+ val pat = tree.pat.show
+ val guard = tree.guard match {
+ case untpd.EmptyTree => ""
+ case guard => s"if ${guard.show}"
+ }
+
+ val body = tree.body match {
+ case Block(Nil, untpd.EmptyTree) => ""
+ case body => s" ${body.show}"
+ }
+
+ val caseDef = s"case $pat$guard => $body"
+
+ hl"""|For each ${"case"} bound variable names have to be unique. In:
+ |
+ |$caseDef
+ |
+ |`${bind.name}` is not unique. Rename one of the bound variables!"""
+ }
+ }
+
+ case class MissingIdent(tree: untpd.Ident, treeKind: String, name: String)(implicit ctx: Context)
+ extends Message(6) {
+ val kind = "Unbound Identifier"
+ val msg = em"not found: $treeKind$name"
+
+ val explanation = {
+ hl"""|The identifier for `$treeKind$name` is not bound, that is,
+ |no declaration for this identifier can be found.
+ |That can happen for instance if $name or its declaration has either been
+ |misspelt, or if you're forgetting an import"""
+ }
+ }
+
+ case class TypeMismatch(found: Type, expected: Type, whyNoMatch: String = "", implicitFailure: String = "")(implicit ctx: Context)
+ extends Message(7) {
+ val kind = "Type Mismatch"
+ val msg = {
+ val (where, printCtx) = Formatting.disambiguateTypes(found, expected)
+ val (fnd, exp) = Formatting.typeDiff(found, expected)(printCtx)
+ s"""|found: $fnd
+ |required: $exp
+ |
+ |$where""".stripMargin + whyNoMatch + implicitFailure
+ }
+
+ val explanation = ""
+ }
+
+ case class NotAMember(site: Type, name: Name, selected: String)(implicit ctx: Context)
+ extends Message(8) {
+ val kind = "Member Not Found"
+
+ val msg = {
+ import core.Flags._
+ val maxDist = 3
+ val decls = site.decls.flatMap { sym =>
+ if (sym.is(Synthetic | PrivateOrLocal) || sym.isConstructor) Nil
+ else List((sym.name.show, sym))
+ }
+
+ // Calculate Levenshtein distance
+ def distance(n1: Iterable[_], n2: Iterable[_]) =
+ n1.foldLeft(List.range(0, n2.size)) { (prev, x) =>
+ (prev zip prev.tail zip n2).scanLeft(prev.head + 1) {
+ case (h, ((d, v), y)) => math.min(
+ math.min(h + 1, v + 1),
+ if (x == y) d else d + 1
+ )
+ }
+ }.last
+
+ // Count number of wrong characters
+ def incorrectChars(x: (String, Int, Symbol)): (String, Symbol, Int) = {
+ val (currName, _, sym) = x
+ val matching = name.show.zip(currName).foldLeft(0) {
+ case (acc, (x,y)) => if (x != y) acc + 1 else acc
+ }
+ (currName, sym, matching)
+ }
+
+ // Get closest match in `site`
+ val closest =
+ decls
+ .map { case (n, sym) => (n, distance(n, name.show), sym) }
+ .collect { case (n, dist, sym) if dist <= maxDist => (n, dist, sym) }
+ .groupBy(_._2).toList
+ .sortBy(_._1)
+ .headOption.map(_._2).getOrElse(Nil)
+ .map(incorrectChars).toList
+ .sortBy(_._3)
+ .take(1).map { case (n, sym, _) => (n, sym) }
+
+ val siteName = site match {
+ case site: NamedType => site.name.show
+ case site => i"$site"
+ }
+
+ val closeMember = closest match {
+ case (n, sym) :: Nil => hl""" - did you mean `${s"$siteName.$n"}`?"""
+ case Nil => ""
+ case _ => assert(
+ false,
+ "Could not single out one distinct member to match on input with"
+ )
+ }
+
+ ex"$selected `$name` is not a member of $site$closeMember"
+ }
+
+ val explanation = ""
+ }
+
+ case class EarlyDefinitionsNotSupported()(implicit ctx: Context)
+ extends Message(9) {
+ val kind = "Syntax"
+ val msg = "early definitions are not supported; use trait parameters instead"
+
+ val explanation = {
+ val code1 =
+ """|trait Logging {
+ | val f: File
+ | f.open()
+ | onExit(f.close())
+ | def log(msg: String) = f.write(msg)
+ |}
+ |
+ |class B extends Logging {
+ | val f = new File("log.data") // triggers a NullPointerException
+ |}
+ |
+ |// early definition gets around the NullPointerException
+ |class C extends {
+ | val f = new File("log.data")
+ |} with Logging""".stripMargin
+
+ val code2 =
+ """|trait Logging(f: File) {
+ | f.open()
+ | onExit(f.close())
+ | def log(msg: String) = f.write(msg)
+ |}
+ |
+ |class C extends Logging(new File("log.data"))""".stripMargin
+
+ hl"""|Earlier versions of Scala did not support trait parameters and "early
+ |definitions" (also known as "early initializers") were used as an alternative.
+ |
+ |Example of old syntax:
+ |
+ |$code1
+ |
+ |The above code can now be written as:
+ |
+ |$code2
+ |"""
+ }
+ }
+
+ case class TopLevelImplicitClass(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(10) {
+ val kind = "Syntax"
+ val msg = hl"""An ${"implicit class"} may not be top-level"""
+
+ val explanation = {
+ val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef
+ val exampleArgs =
+ constr0.vparamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ")
+ def defHasBody[T] = impl.body.exists(!_.isEmpty)
+ val exampleBody = if (defHasBody) "{\n ...\n }" else ""
+ hl"""|There may not be any method, member or object in scope with the same name as
+ |the implicit class and a case class automatically gets a companion object with
+ |the same name created by the compiler which would cause a naming conflict if it
+ |were allowed.
+ |
+ |""" + implicitClassRestrictionsText + hl"""|
+ |
+ |To resolve the conflict declare ${cdef.name} inside of an ${"object"} then import the class
+ |from the object at the use site if needed, for example:
+ |
+ |object Implicits {
+ | implicit class ${cdef.name}($exampleArgs)$exampleBody
+ |}
+ |
+ |// At the use site:
+ |import Implicits.${cdef.name}"""
+ }
+ }
+
+ case class ImplicitCaseClass(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(11) {
+ val kind = "Syntax"
+ val msg = hl"""A ${"case class"} may not be defined as ${"implicit"}"""
+
+ val explanation =
+ hl"""|implicit classes may not be case classes. Instead use a plain class:
+ |
+ |implicit class ${cdef.name}...
+ |
+ |""" + implicitClassRestrictionsText
+ }
+
+ case class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(implicit ctx: Context)
+ extends Message(12) {
+ val kind = "Syntax"
+ val msg = hl"""${"object"}s must not have a self ${"type"}"""
+
+ val explanation = {
+ val untpd.ModuleDef(name, tmpl) = mdef
+ val ValDef(_, selfTpt, _) = tmpl.self
+ hl"""|${"object"}s must not have a self ${"type"}:
+ |
+ |Consider these alternative solutions:
+ | - Create a trait or a class instead of an object
+ | - Let the object extend a trait containing the self type:
+ |
+ | object $name extends ${selfTpt.show}"""
+ }
+ }
+
+ case class TupleTooLong(ts: List[untpd.Tree])(implicit ctx: Context)
+ extends Message(13) {
+ import Definitions.MaxTupleArity
+ val kind = "Syntax"
+ val msg = hl"""A ${"tuple"} cannot have more than ${MaxTupleArity} members"""
+
+ val explanation = {
+ val members = ts.map(_.showSummary).grouped(MaxTupleArity)
+ val nestedRepresentation = members.map(_.mkString(", ")).mkString(")(")
+ hl"""|This restriction will be removed in the future.
+ |Currently it is possible to use nested tuples when more than $MaxTupleArity are needed, for example:
+ |
+ |((${nestedRepresentation}))"""
+ }
+ }
+
+ case class RepeatedModifier(modifier: String)(implicit ctx:Context)
+ extends Message(14) {
+ val kind = "Syntax"
+ val msg = hl"""repeated modifier $modifier"""
+
+ val explanation = {
+ val code1 = hl"""private private val Origin = Point(0, 0)"""
+ val code2 = hl"""private final val Origin = Point(0, 0)"""
+ hl"""This happens when you accidentally specify the same modifier twice.
+ |
+ |Example:
+ |
+ |$code1
+ |
+ |instead of
+ |
+ |$code2
+ |
+ |"""
+ }
+ }
+
+ case class InterpolatedStringError()(implicit ctx:Context)
+ extends Message(15) {
+ val kind = "Syntax"
+ val msg = "error in interpolated string: identifier or block expected"
+ val explanation = {
+ val code1 = "s\"$new Point(0, 0)\""
+ val code2 = "s\"${new Point(0, 0)}\""
+ hl"""|This usually happens when you forget to place your expressions inside curly braces.
+ |
+ |$code1
+ |
+ |should be written as
+ |
+ |$code2
+ |"""
+ }
+ }
+
+ case class UnboundPlaceholderParameter()(implicit ctx:Context)
+ extends Message(16) {
+ val kind = "Syntax"
+ val msg = "unbound placeholder parameter; incorrect use of `_`"
+ val explanation =
+ hl"""|The `_` placeholder syntax was used where it could not be bound.
+ |Consider explicitly writing the variable binding.
+ |
+ |This can be done by replacing `_` with a variable (eg. `x`)
+ |and adding ${"x =>"} where applicable.
+ |
+ |Example before:
+ |
+ |${"{ _ }"}
+ |
+ |Example after:
+ |
+ |${"x => { x }"}
+ |
+ |Another common occurrence for this error is defining a val with `_`:
+ |
+ |${"val a = _"}
+ |
+ |But this val definition isn't very useful, it can never be assigned
+ |another value. And thus will always remain uninitialized.
+ |Consider replacing the ${"val"} with ${"var"}:
+ |
+ |${"var a = _"}
+ |
+ |Note that this use of `_` is not placeholder syntax,
+ |but an uninitialized var definition"""
+ }
+
+ case class IllegalStartSimpleExpr(illegalToken: String)(implicit ctx: Context)
+ extends Message(17) {
+ val kind = "Syntax"
+ val msg = "illegal start of simple expression"
+ val explanation = {
+ hl"""|An expression yields a value. In the case of the simple expression, this error
+ |commonly occurs when there's a missing parenthesis or brace. The reason being
+ |that a simple expression is one of the following:
+ |
+ |- Block
+ |- Expression in parenthesis
+ |- Identifier
+ |- Object creation
+ |- Literal
+ |
+ |which cannot start with ${Red(illegalToken)}."""
+ }
+ }
+
+ case class MissingReturnType()(implicit ctx:Context) extends Message(18) {
+ val kind = "Syntax"
+ val msg = "missing return type"
+ val explanation =
+ hl"""|An abstract declaration must have a return type. For example:
+ |
+ |trait Shape {
+ | def area: Double // abstract declaration returning a ${"Double"}
+ |}"""
+ }
+
+ case class YieldOrDoExpectedInForComprehension()(implicit ctx: Context)
+ extends Message(19) {
+ val kind = "Syntax"
+ val msg = hl"${"yield"} or ${"do"} expected"
+
+ val explanation =
+ hl"""|When the enumerators in a for comprehension are not placed in parentheses or
+ |braces, a ${"do"} or ${"yield"} statement is required after the enumerators
+ |section of the comprehension.
+ |
+ |You can save some keystrokes by omitting the parentheses and writing
+ |
+ |${"val numbers = for i <- 1 to 3 yield i"}
+ |
+ | instead of
+ |
+ |${"val numbers = for (i <- 1 to 3) yield i"}
+ |
+ |but the ${"yield"} keyword is still required.
+ |
+ |For comprehensions that simply perform a side effect without yielding anything
+ |can also be written without parentheses but a ${"do"} keyword has to be
+ |included. For example,
+ |
+ |${"for (i <- 1 to 3) println(i)"}
+ |
+ |can be written as
+ |
+ |${"for i <- 1 to 3 do println(i) // notice the 'do' keyword"}
+ |
+ |"""
+ }
+
+ case class ProperDefinitionNotFound()(implicit ctx: Context)
+ extends Message(20) {
+ val kind = "Definition Not Found"
+ val msg = hl"""Proper definition was not found in ${"@usecase"}"""
+
+ val explanation = {
+ val noUsecase =
+ "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That"
+
+ val usecase =
+ """|/** Map from List[A] => List[B]
+ | *
+ | * @usecase def map[B](f: A => B): List[B]
+ | */
+ |def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That
+ |""".stripMargin
+
+ hl"""|Usecases are only supported for ${"def"}s. They exist because with Scala's
+ |advanced type-system, we sometimes end up with seemingly scary signatures.
+ |The usage of these methods, however, needs not be - for instance the `map`
+ |function
+ |
+ |${"List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)"}
+ |
+ |is easy to understand and use - but has a rather bulky signature:
+ |
+ |$noUsecase
+ |
+ |to mitigate this and ease the usage of such functions we have the ${"@usecase"}
+ |annotation for docstrings. Which can be used like this:
+ |
+ |$usecase
+ |
+ |When creating the docs, the signature of the method is substituted by the
+ |usecase and the compiler makes sure that it is valid. Because of this, you're
+ |only allowed to use ${"def"}s when defining usecases."""
+ }
+ }
+
+ case class ByNameParameterNotSupported()(implicit ctx: Context)
+ extends Message(21) {
+ val kind = "Syntax"
+ val msg = "By-name parameter type not allowed here."
+
+ val explanation =
+ hl"""|By-name parameters act like functions that are only evaluated when referenced,
+ |allowing for lazy evaluation of a parameter.
+ |
+ |An example of using a by-name parameter would look like:
+ |${"def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function"}
+ |
+ |An example of the syntax of passing an actual function as a parameter:
+ |${"def func(f: (Boolean => Boolean)) = f(true)"}
+ |
+ |or:
+ |
+ |${"def func(f: Boolean => Boolean) = f(true)"}
+ |
+ |And the usage could be as such:
+ |${"func(bool => // do something...)"}
+ |"""
+ }
+
+ case class WrongNumberOfArgs(fntpe: Type, argKind: String, expectedArgs: List[TypeParamInfo], actual: List[untpd.Tree])(implicit ctx: Context)
+ extends Message(22) {
+ val kind = "Syntax"
+
+ private val expectedCount = expectedArgs.length
+ private val actualCount = actual.length
+ private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough"
+
+ //TODO add def simpleParamName to TypeParamInfo
+ private val expectedArgString = fntpe
+ .widen.typeParams
+ .map(_.paramName.unexpandedName.show)
+ .mkString("[", ", ", "]")
+
+ private val actualArgString = actual.map(_.show).mkString("[", ", ", "]")
+
+ private val prettyName = fntpe.termSymbol match {
+ case NoSymbol => fntpe.show
+ case symbol => symbol.showFullName
+ }
+
+ val msg =
+ hl"""|${NoColor(msgPrefix)} ${argKind} arguments for $prettyName$expectedArgString
+ |expected: $expectedArgString
+ |actual: $actualArgString""".stripMargin
+
+ val explanation = {
+ val tooManyTypeParams =
+ """|val tuple2: (Int, String) = (1, "one")
+ |val list: List[(Int, String)] = List(tuple2)""".stripMargin
+
+ if (actualCount > expectedCount)
+ hl"""|You have supplied too many type parameters
+ |
+ |For example List takes a single type parameter (List[A])
+ |If you need to hold more types in a list then you need to combine them
+ |into another data type that can contain the number of types you need,
+ |In this example one solution would be to use a Tuple:
+ |
+ |${tooManyTypeParams}"""
+ else
+ hl"""|You have not supplied enough type parameters
+ |If you specify one type parameter then you need to specify every type parameter."""
+ }
+ }
+
+ case class IllegalVariableInPatternAlternative()(implicit ctx: Context)
+ extends Message(23) {
+ val kind = "Syntax"
+ val msg = "Variables are not allowed in alternative patterns"
+ val explanation = {
+ val varInAlternative =
+ """|def g(pair: (Int,Int)): Int = pair match {
+ | case (1, n) | (n, 1) => n
+ | case _ => 0
+ |}""".stripMargin
+
+ val fixedVarInAlternative =
+ """|def g(pair: (Int,Int)): Int = pair match {
+ | case (1, n) => n
+ | case (n, 1) => n
+ | case _ => 0
+ |}""".stripMargin
+
+ hl"""|Variables are not allowed within alternate pattern matches. You can workaround
+ |this issue by adding additional cases for each alternative. For example, the
+ |illegal function:
+ |
+ |$varInAlternative
+ |could be implemented by moving each alternative into a separate case:
+ |
+ |$fixedVarInAlternative"""
+ }
+ }
+
+ case class TypeParamsTypeExpected(mods: untpd.Modifiers, identifier: TermName)(implicit ctx: Context)
+ extends Message(24) {
+ val kind = "Syntax"
+ val msg = hl"""Expected ${"type"} keyword for type parameter $identifier"""
+ val explanation =
+ hl"""|This happens when you add modifiers like ${"private"} or ${"protected"}
+ |to your type parameter definition without adding the ${"type"} keyword.
+ |
+ |Add ${"type"} to your code, e.g.:
+ |${s"trait A[${mods.flags} type $identifier]"}
+ |"""
+ }
+
+ case class IdentifierExpected(identifier: String)(implicit ctx: Context)
+ extends Message(25) {
+ val kind = "Syntax"
+ val msg = "identifier expected"
+ val explanation = {
+ val wrongIdentifier = s"def foo: $identifier = {...}"
+ val validIdentifier = s"def foo = {...}"
+ hl"""|An identifier expected, but `$identifier` found. This could be because
+ |`$identifier` is not a valid identifier. As a workaround, the compiler could
+ |infer the type for you. For example, instead of:
+ |
+ |$wrongIdentifier
+ |
+ |Write your code like:
+ |
+ |$validIdentifier
+ |
+ |"""
+ }
+ }
+
+ case class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context)
+ extends Message(26) {
+ val kind = "Syntax"
+ val msg = "auxiliary constructor needs non-implicit parameter list"
+ val explanation =
+ hl"""|Only the primary constructor is allowed an ${"implicit"} parameter list;
+ |auxiliary constructors need non-implicit parameter lists. When a primary
+ |constructor has an implicit argslist, auxiliary constructors that call the
+ |primary constructor must specify the implicit value.
+ |
+ |To resolve this issue check for:
+ | - forgotten parenthesis on ${"this"} (${"def this() = { ... }"})
+ | - auxiliary constructors specify the implicit value
+ |"""
+ }
+
+ case class IncorrectRepeatedParameterSyntax()(implicit ctx: Context) extends Message(27) {
+ val kind = "Syntax"
+ val msg = "'*' expected"
+ val explanation =
+ hl"""|Expected * in '_*' operator.
+ |
+ |The '_*' operator can be used to supply a sequence-based argument
+ |to a method with a variable-length or repeated parameter. It is used
+ |to expand the sequence to a variable number of arguments, such that:
+ |func(args: _*) would expand to func(arg1, arg2 ... argN).
+ |
+ |Below is an example of how a method with a variable-length
+ |parameter can be declared and used.
+ |
+ |Squares the arguments of a variable-length parameter:
+ |${"def square(args: Int*) = args.map(a => a * a)"}
+ |
+ |Usage:
+ |${"square(1, 2, 3) // res0: List[Int] = List(1, 4, 9)"}
+ |
+ |Secondary Usage with '_*':
+ |${"val ints = List(2, 3, 4) // ints: List[Int] = List(2, 3, 4)"}
+ |${"square(ints: _*) // res1: List[Int] = List(4, 9, 16)"}
+ |""".stripMargin
+ }
+
+ case class IllegalLiteral()(implicit ctx: Context) extends Message(28) {
+ val kind = "Syntax"
+ val msg = "illegal literal"
+ val explanation =
+ hl"""|Available literals can be divided into several groups:
+ | - Integer literals: 0, 21, 0xFFFFFFFF, -42L
+ | - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1
+ | - Boolean Literals: true, false
+ | - Character Literals: 'a', '\u0041', '\n'
+ | - String Literals: "Hello, World!"
+ | - null
+ |"""
+ }
+
+ case class PatternMatchExhaustivity(uncovered: String)(implicit ctx: Context)
+ extends Message(29) {
+ val kind = "Pattern Match Exhaustivity"
+ val msg =
+ hl"""|match may not be exhaustive.
+ |
+ |It would fail on: $uncovered"""
+
+
+ val explanation = ""
+ }
+
+ case class MatchCaseUnreachable()(implicit ctx: Context)
+ extends Message(30) {
+ val kind = s"""Match ${hl"case"} Unreachable"""
+ val msg = "unreachable code"
+ val explanation = ""
+ }
+
+ case class SeqWildcardPatternPos()(implicit ctx: Context)
+ extends Message(31) {
+ val kind = "Syntax"
+ val msg = "`_*' can be used only for last argument"
+ val explanation = {
+ val code =
+ """def sumOfTheFirstTwo(list: List[Int]): Int = list match {
+ | case List(first, second, x:_*) => first + second
+ | case _ => 0
+ |}"""
+ hl"""|Sequence wildcard pattern is expected at the end of an argument list.
+ |This pattern matches any remaining elements in a sequence.
+ |Consider the following example:
+ |
+ |$code
+ |
+ |Calling:
+ |
+ |${"sumOfTheFirstTwo(List(1, 2, 10))"}
+ |
+ |would give 3 as a result"""
+ }
+ }
+
+ case class IllegalStartOfSimplePattern()(implicit ctx: Context) extends Message(32) {
+ val kind = "Syntax"
+ val msg = "illegal start of simple pattern"
+ val explanation = {
+ val sipCode =
+ """def f(x: Int, y: Int) = x match {
+ | case `y` => ...
+ |}
+ """
+ val constructorPatternsCode =
+ """case class Person(name: String, age: Int)
+ |
+ |def test(p: Person) = p match {
+ | case Person(name, age) => ...
+ |}
+ """
+ val tupplePatternsCode =
+ """def swap(tuple: (String, Int)): (Int, String) = tuple match {
+ | case (text, number) => (number, text)
+ |}
+ """
+ val patternSequencesCode =
+ """def getSecondValue(list: List[Int]): Int = list match {
+ | case List(_, second, x:_*) => second
+ | case _ => 0
+ |}"""
+ hl"""|Simple patterns can be divided into several groups:
+ |- Variable Patterns: ${"case x => ..."}.
+ | It matches any value, and binds the variable name to that value.
+ | A special case is the wild-card pattern _ which is treated as if it was a fresh
+ | variable on each occurrence.
+ |
+ |- Typed Patterns: ${"case x: Int => ..."} or ${"case _: Int => ..."}.
+ | This pattern matches any value matched by the specified type; it binds the variable
+ | name to that value.
+ |
+ |- Literal Patterns: ${"case 123 => ..."} or ${"case 'A' => ..."}.
+ | This type of pattern matches any value that is equal to the specified literal.
+ |
+ |- Stable Identifier Patterns:
+ |
+ | $sipCode
+ |
+ | the match succeeds only if the x argument and the y argument of f are equal.
+ |
+ |- Constructor Patterns:
+ |
+ | $constructorPatternsCode
+ |
+ | The pattern binds all object's fields to the variable names (name and age, in this
+ | case).
+ |
+ |- Tuple Patterns:
+ |
+ | $tupplePatternsCode
+ |
+ | Calling:
+ |
+ | ${"""swap(("Luftballons", 99)"""}
+ |
+ | would give ${"""(99, "Luftballons")"""} as a result.
+ |
+ |- Pattern Sequences:
+ |
+ | $patternSequencesCode
+ |
+ | Calling:
+ |
+ | ${"getSecondValue(List(1, 10, 2))"}
+ |
+ | would give 10 as a result.
+ | This pattern is possible because a companion object for the List class has a method
+ | with the following signature:
+ |
+ | ${"def unapplySeq[A](x: List[A]): Some[List[A]]"}
+ |"""
+ }
+ }
+
+ case class PkgDuplicateSymbol(existing: Symbol)(implicit ctx: Context)
+ extends Message(33) {
+ val kind = "Duplicate Symbol"
+ val msg = hl"trying to define package with same name as `$existing`"
+ val explanation = ""
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/rewrite/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrite/Rewrites.scala
new file mode 100644
index 000000000..c42c808fe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/rewrite/Rewrites.scala
@@ -0,0 +1,92 @@
+package dotty.tools.dotc
+package rewrite
+
+import util.{SourceFile, Positions}
+import Positions.Position
+import core.Contexts.{Context, FreshContext}
+import collection.mutable
+
+/** Handles rewriting of Scala2 files to Dotty */
+object Rewrites {
+ private class PatchedFiles extends mutable.HashMap[SourceFile, Patches]
+
+ private case class Patch(pos: Position, replacement: String) {
+ def delta = replacement.length - (pos.end - pos.start)
+ }
+
+ private class Patches(source: SourceFile) {
+ private val pbuf = new mutable.ListBuffer[Patch]()
+
+ def addPatch(pos: Position, replacement: String): Unit =
+ pbuf += Patch(pos, replacement)
+
+ def apply(cs: Array[Char]): Array[Char] = {
+ val delta = pbuf.map(_.delta).sum
+ val patches = pbuf.toList.sortBy(_.pos.start)
+ if (patches.nonEmpty)
+ patches reduceLeft {(p1, p2) =>
+ assert(p1.pos.end <= p2.pos.start, s"overlapping patches: $p1 and $p2")
+ p2
+ }
+ val ds = new Array[Char](cs.length + delta)
+ def loop(ps: List[Patch], inIdx: Int, outIdx: Int): Unit = {
+ def copy(upTo: Int): Int = {
+ val untouched = upTo - inIdx
+ Array.copy(cs, inIdx, ds, outIdx, untouched)
+ outIdx + untouched
+ }
+ ps match {
+ case patch @ Patch(pos, replacement) :: ps1 =>
+ val outNew = copy(pos.start)
+ replacement.copyToArray(ds, outNew)
+ loop(ps1, pos.end, outNew + replacement.length)
+ case Nil =>
+ val outNew = copy(cs.length)
+ assert(outNew == ds.length, s"$outNew != ${ds.length}")
+ }
+ }
+ loop(patches, 0, 0)
+ ds
+ }
+
+ def writeBack(): Unit = {
+ val out = source.file.output
+ val chars = apply(source.underlying.content)
+ val bytes = new String(chars).getBytes
+ out.write(bytes)
+ out.close()
+ }
+ }
+
+ /** If -rewrite is set, record a patch that replaces the range
+ * given by `pos` in `source` by `replacement`
+ */
+ def patch(source: SourceFile, pos: Position, replacement: String)(implicit ctx: Context): Unit =
+ for (rewrites <- ctx.settings.rewrite.value)
+ rewrites.patched
+ .getOrElseUpdate(source, new Patches(source))
+ .addPatch(pos, replacement)
+
+ /** Patch position in `ctx.compilationUnit.source`. */
+ def patch(pos: Position, replacement: String)(implicit ctx: Context): Unit =
+ patch(ctx.compilationUnit.source, pos, replacement)
+
+ /** If -rewrite is set, apply all patches and overwrite patched source files.
+ */
+ def writeBack()(implicit ctx: Context) =
+ for (rewrites <- ctx.settings.rewrite.value; source <- rewrites.patched.keys) {
+ ctx.echo(s"[patched file ${source.file.path}]")
+ rewrites.patched(source).writeBack()
+ }
+}
+
+/** A completely encapsulated class representing rewrite state, used
+ * as an optional setting.
+ */
+class Rewrites {
+ import Rewrites._
+ private val patched = new PatchedFiles
+}
+
+
+
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
new file mode 100644
index 000000000..bc8528c05
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -0,0 +1,518 @@
+package dotty.tools.dotc
+package sbt
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._
+import Names._, NameOps._, StdNames._
+import typer.Inliner
+
+import dotty.tools.io.Path
+import java.io.PrintWriter
+
+import scala.collection.mutable
+
+/** This phase sends a representation of the API of classes to sbt via callbacks.
+ *
+ * This is used by sbt for incremental recompilation.
+ *
+ * See the documentation of `ExtractAPICollector`, `ExtractDependencies`,
+ * `ExtractDependenciesCollector` and
+ * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html for more
+ * information on incremental recompilation.
+ *
+ * The following flags affect this phase:
+ * -Yforce-sbt-phases
+ * -Ydump-sbt-inc
+ *
+ * @see ExtractDependencies
+ */
+class ExtractAPI extends Phase {
+ override def phaseName: String = "sbt-api"
+
+ // SuperAccessors need to be part of the API (see the scripted test
+ // `trait-super` for an example where this matters), this is only the case
+ // after `PostTyper` (unlike `ExtractDependencies`, the simplifications to trees
+ // done by `PostTyper` do not affect this phase because it only cares about
+ // definitions, and `PostTyper` does not change definitions).
+ override def runsAfter = Set(classOf[transform.PostTyper])
+
+ override def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ val dumpInc = ctx.settings.YdumpSbtInc.value
+ val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value
+ if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) {
+ val sourceFile = unit.source.file.file
+ val apiTraverser = new ExtractAPICollector
+ val source = apiTraverser.apiSource(unit.tpdTree)
+
+ if (dumpInc) {
+ // Append to existing file that should have been created by ExtractDependencies
+ val pw = new PrintWriter(Path(sourceFile).changeExtension("inc").toFile
+ .bufferedWriter(append = true), true)
+ try {
+ pw.println(DefaultShowAPI(source))
+ } finally pw.close()
+ }
+
+ if (ctx.sbtCallback != null)
+ ctx.sbtCallback.api(sourceFile, source)
+ }
+ }
+}
+
+/** Extracts full (including private members) API representation out of Symbols and Types.
+ *
+ * The exact representation used for each type is not important: the only thing
+ * that matters is that a binary-incompatible or source-incompatible change to
+ * the API (for example, changing the signature of a method, or adding a parent
+ * to a class) should result in a change to the API representation so that sbt
+ * can recompile files that depend on this API.
+ *
+ * Note that we only record types as they are defined and never "as seen from"
+ * some other prefix because `Types#asSeenFrom` is a complex operation and
+ * doing it for every inherited member would be slow, and because the number
+ * of prefixes can be enormous in some cases:
+ *
+ * class Outer {
+ * type T <: S
+ * type S
+ * class A extends Outer { /*...*/ }
+ * class B extends Outer { /*...*/ }
+ * class C extends Outer { /*...*/ }
+ * class D extends Outer { /*...*/ }
+ * class E extends Outer { /*...*/ }
+ * }
+ *
+ * `S` might be refined in an arbitrary way inside `A` for example, this
+ * affects the type of `T` as seen from `Outer#A`, so we could record that, but
+ * the class `A` also contains itself as a member, so `Outer#A#A#A#...` is a
+ * valid prefix for `T`. Even if we avoid loops, we still have a combinatorial
+ * explosion of possible prefixes, like `Outer#A#B#C#D#E`.
+ *
+ * It is much simpler to record `T` once where it is defined, but that means
+ * that the API representation of `T` may not change even though `T` as seen
+ * from some prefix has changed. This is why in `ExtractDependencies` we need
+ * to traverse used types to not miss dependencies, see the documentation of
+ * `ExtractDependencies#usedTypeTraverser`.
+ *
+ * TODO: sbt does not store the full representation that we compute, instead it
+ * hashes parts of it to reduce memory usage, then to see if something changed,
+ * it compares the hashes instead of comparing the representations. We should
+ * investigate whether we can just directly compute hashes in this phase
+ * without going through an intermediate representation, see
+ * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation
+ */
+private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder {
+ import tpd._
+ import xsbti.api
+
+ /** This cache is necessary for correctness, see the comment about inherited
+ * members in `apiClassStructure`
+ */
+ private[this] val classLikeCache = new mutable.HashMap[ClassSymbol, api.ClassLike]
+ /** This cache is optional, it avoids recomputing representations */
+ private[this] val typeCache = new mutable.HashMap[Type, api.Type]
+
+ private[this] object Constants {
+ val emptyStringArray = Array[String]()
+ val local = new api.ThisQualifier
+ val public = new api.Public
+ val privateLocal = new api.Private(local)
+ val protectedLocal = new api.Protected(local)
+ val unqualified = new api.Unqualified
+ val thisPath = new api.This
+ val emptyType = new api.EmptyType
+ val emptyModifiers =
+ new api.Modifiers(false, false, false, false, false,false, false, false)
+ }
+
+ /** Some Dotty types do not have a corresponding type in xsbti.api.* that
+ * represents them. Until this is fixed we can workaround this by using
+ * special annotations that can never appear in the source code to
+ * represent these types.
+ *
+ * @param tp An approximation of the type we're trying to represent
+ * @param marker A special annotation to differentiate our type
+ */
+ private def withMarker(tp: api.Type, marker: api.Annotation) =
+ new api.Annotated(tp, Array(marker))
+ private def marker(name: String) =
+ new api.Annotation(new api.Constant(Constants.emptyType, name), Array())
+ val orMarker = marker("Or")
+ val byNameMarker = marker("ByName")
+
+
+ /** Extract the API representation of a source file */
+ def apiSource(tree: Tree): api.SourceAPI = {
+ val classes = new mutable.ListBuffer[api.ClassLike]
+ def apiClasses(tree: Tree): Unit = tree match {
+ case PackageDef(_, stats) =>
+ stats.foreach(apiClasses)
+ case tree: TypeDef =>
+ classes += apiClass(tree.symbol.asClass)
+ case _ =>
+ }
+
+ apiClasses(tree)
+ forceThunks()
+ new api.SourceAPI(Array(), classes.toArray)
+ }
+
+ def apiClass(sym: ClassSymbol): api.ClassLike =
+ classLikeCache.getOrElseUpdate(sym, computeClass(sym))
+
+ private def computeClass(sym: ClassSymbol): api.ClassLike = {
+ import xsbti.api.{DefinitionType => dt}
+ val defType =
+ if (sym.is(Trait)) dt.Trait
+ else if (sym.is(ModuleClass)) {
+ if (sym.is(PackageClass)) dt.PackageModule
+ else dt.Module
+ } else dt.ClassDef
+
+ val selfType = apiType(sym.classInfo.givenSelfType)
+
+ val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName
+
+ val tparams = sym.typeParams.map(apiTypeParameter)
+
+ val structure = apiClassStructure(sym)
+
+ new api.ClassLike(
+ defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray,
+ tparams.toArray, name.toString, apiAccess(sym), apiModifiers(sym),
+ apiAnnotations(sym).toArray)
+ }
+
+ private[this] val LegacyAppClass = ctx.requiredClass("dotty.runtime.LegacyApp")
+
+ def apiClassStructure(csym: ClassSymbol): api.Structure = {
+ val cinfo = csym.classInfo
+
+ val bases = linearizedAncestorTypes(cinfo)
+ val apiBases = bases.map(apiType)
+
+ // Synthetic methods that are always present do not affect the API
+ // and can therefore be ignored.
+ def alwaysPresent(s: Symbol) =
+ s.isCompanionMethod || (csym.is(ModuleClass) && s.isConstructor)
+ val decls = cinfo.decls.filterNot(alwaysPresent).toList
+ val apiDecls = apiDefinitions(decls)
+
+ val declSet = decls.toSet
+ // TODO: We shouldn't have to compute inherited members. Instead, `Structure`
+ // should have a lazy `parentStructures` field.
+ val inherited = cinfo.baseClasses
+ // We cannot filter out `LegacyApp` because it contains the main method,
+ // see the comment about main class discovery in `computeType`.
+ .filter(bc => !bc.is(Scala2x) || bc.eq(LegacyAppClass))
+ .flatMap(_.classInfo.decls.filterNot(s => s.is(Private) || declSet.contains(s)))
+ // Inherited members need to be computed lazily because a class might contain
+ // itself as an inherited member, like in `class A { class B extends A }`,
+ // this works because of `classLikeCache`
+ val apiInherited = lzy(apiDefinitions(inherited).toArray)
+
+ new api.Structure(strict2lzy(apiBases.toArray), strict2lzy(apiDecls.toArray), apiInherited)
+ }
+
+ def linearizedAncestorTypes(info: ClassInfo): List[Type] = {
+ val ref = info.fullyAppliedRef
+ // Note that the ordering of classes in `baseClasses` is important.
+ info.baseClasses.tail.map(ref.baseTypeWithArgs)
+ }
+
+ def apiDefinitions(defs: List[Symbol]): List[api.Definition] = {
+ // The hash generated by sbt for definitions is supposed to be symmetric so
+ // we shouldn't have to sort them, but it actually isn't symmetric for
+ // definitions which are classes, therefore we need to sort classes to
+ // ensure a stable hash.
+ // Modules and classes come first and are sorted by name, all other
+ // definitions come later and are not sorted.
+ object classFirstSort extends Ordering[Symbol] {
+ override def compare(a: Symbol, b: Symbol) = {
+ val aIsClass = a.isClass
+ val bIsClass = b.isClass
+ if (aIsClass == bIsClass) {
+ if (aIsClass) {
+ if (a.is(Module) == b.is(Module))
+ a.fullName.toString.compareTo(b.fullName.toString)
+ else if (a.is(Module))
+ -1
+ else
+ 1
+ } else
+ 0
+ } else if (aIsClass)
+ -1
+ else
+ 1
+ }
+ }
+
+ defs.sorted(classFirstSort).map(apiDefinition)
+ }
+
+ def apiDefinition(sym: Symbol): api.Definition = {
+ if (sym.isClass) {
+ apiClass(sym.asClass)
+ } else if (sym.isType) {
+ apiTypeMember(sym.asType)
+ } else if (sym.is(Mutable, butNot = Accessor)) {
+ new api.Var(apiType(sym.info), sym.name.toString,
+ apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray)
+ } else if (sym.isStable) {
+ new api.Val(apiType(sym.info), sym.name.toString,
+ apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray)
+ } else {
+ apiDef(sym.asTerm)
+ }
+ }
+
+ def apiDef(sym: TermSymbol): api.Def = {
+ def paramLists(t: Type, start: Int = 0): List[api.ParameterList] = t match {
+ case pt: PolyType =>
+ assert(start == 0)
+ paramLists(pt.resultType)
+ case mt @ MethodType(pnames, ptypes) =>
+ // TODO: We shouldn't have to work so hard to find the default parameters
+ // of a method, Dotty should expose a convenience method for that, see #1143
+ val defaults =
+ if (sym.is(DefaultParameterized)) {
+ val qual =
+ if (sym.isClassConstructor)
+ sym.owner.companionModule // default getters for class constructors are found in the companion object
+ else
+ sym.owner
+ (0 until pnames.length).map(i => qual.info.member(sym.name.defaultGetterName(start + i)).exists)
+ } else
+ (0 until pnames.length).map(Function.const(false))
+ val params = (pnames, ptypes, defaults).zipped.map((pname, ptype, isDefault) =>
+ new api.MethodParameter(pname.toString, apiType(ptype),
+ isDefault, api.ParameterModifier.Plain))
+ new api.ParameterList(params.toArray, mt.isImplicit) :: paramLists(mt.resultType, params.length)
+ case _ =>
+ Nil
+ }
+
+ val tparams = sym.info match {
+ case pt: PolyType =>
+ (pt.paramNames, pt.paramBounds).zipped.map((pname, pbounds) =>
+ apiTypeParameter(pname.toString, 0, pbounds.lo, pbounds.hi))
+ case _ =>
+ Nil
+ }
+ val vparamss = paramLists(sym.info)
+ val retTp = sym.info.finalResultType.widenExpr
+
+ new api.Def(vparamss.toArray, apiType(retTp), tparams.toArray,
+ sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray)
+ }
+
+ def apiTypeMember(sym: TypeSymbol): api.TypeMember = {
+ val typeParams = Array[api.TypeParameter]()
+ val name = sym.name.toString
+ val access = apiAccess(sym)
+ val modifiers = apiModifiers(sym)
+ val as = apiAnnotations(sym)
+ val tpe = sym.info
+
+ if (sym.isAliasType)
+ new api.TypeAlias(apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.toArray)
+ else {
+ assert(sym.isAbstractType)
+ new api.TypeDeclaration(apiType(tpe.bounds.lo), apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.toArray)
+ }
+ }
+
+ def apiType(tp: Type): api.Type = {
+ typeCache.getOrElseUpdate(tp, computeType(tp))
+ }
+
+ private def computeType(tp: Type): api.Type = {
+ // TODO: Never dealias. We currently have to dealias because
+ // sbt main class discovery relies on the signature of the main
+ // method being fully dealiased. See https://github.com/sbt/zinc/issues/102
+ val tp2 = if (!tp.isHK) tp.dealias else tp
+ tp2 match {
+ case NoPrefix | NoType =>
+ Constants.emptyType
+ case tp: NamedType =>
+ val sym = tp.symbol
+ // Normalize package prefix to avoid instability of representation
+ val prefix = if (sym.isClass && sym.owner.is(Package))
+ sym.owner.thisType
+ else
+ tp.prefix
+ new api.Projection(simpleType(prefix), sym.name.toString)
+ case TypeApplications.AppliedType(tycon, args) =>
+ def processArg(arg: Type): api.Type = arg match {
+ case arg @ TypeBounds(lo, hi) => // Handle wildcard parameters
+ if (lo.eq(defn.NothingType) && hi.eq(defn.AnyType))
+ Constants.emptyType
+ else {
+ val name = "_"
+ val ref = new api.ParameterRef(name)
+ new api.Existential(ref,
+ Array(apiTypeParameter(name, arg.variance, lo, hi)))
+ }
+ case _ =>
+ apiType(arg)
+ }
+
+ val apiTycon = simpleType(tycon)
+ val apiArgs = args.map(processArg)
+ new api.Parameterized(apiTycon, apiArgs.toArray)
+ case PolyType(tparams, res) =>
+ val apiTparams = tparams.map(apiTypeParameter)
+ val apiRes = apiType(res)
+ new api.Polymorphic(apiRes, apiTparams.toArray)
+ case rt: RefinedType =>
+ val name = rt.refinedName.toString
+ val parent = apiType(rt.parent)
+
+ def typeRefinement(name: String, tp: TypeBounds): api.TypeMember = tp match {
+ case TypeAlias(alias) =>
+ new api.TypeAlias(apiType(alias),
+ Array(), name, Constants.public, Constants.emptyModifiers, Array())
+ case TypeBounds(lo, hi) =>
+ new api.TypeDeclaration(apiType(lo), apiType(hi),
+ Array(), name, Constants.public, Constants.emptyModifiers, Array())
+ }
+
+ val decl: Array[api.Definition] = rt.refinedInfo match {
+ case rinfo: TypeBounds =>
+ Array(typeRefinement(name, rinfo))
+ case _ =>
+ ctx.debuglog(i"sbt-api: skipped structural refinement in $rt")
+ Array()
+ }
+ new api.Structure(strict2lzy(Array(parent)), strict2lzy(decl), strict2lzy(Array()))
+ case tp: RecType =>
+ apiType(tp.parent)
+ case RecThis(recType) =>
+ // `tp` must be present inside `recType`, so calling `apiType` on
+ // `recType` would lead to an infinite recursion, we avoid this by
+ // computing the representation of `recType` lazily.
+ apiLazy(recType)
+ case tp: AndOrType =>
+ val parents = List(apiType(tp.tp1), apiType(tp.tp2))
+
+ // TODO: Add a real representation for AndOrTypes in xsbti. The order of
+ // types in an `AndOrType` does not change the API, so the API hash should
+ // be symmetric.
+ val s = new api.Structure(strict2lzy(parents.toArray), strict2lzy(Array()), strict2lzy(Array()))
+ if (tp.isAnd)
+ s
+ else
+ withMarker(s, orMarker)
+ case ExprType(resultType) =>
+ withMarker(apiType(resultType), byNameMarker)
+ case ConstantType(constant) =>
+ new api.Constant(apiType(constant.tpe), constant.stringValue)
+ case AnnotatedType(tpe, annot) =>
+ // TODO: Annotation support
+ ctx.debuglog(i"sbt-api: skipped annotation in $tp2")
+ apiType(tpe)
+ case tp: ThisType =>
+ apiThis(tp.cls)
+ case tp: ParamType =>
+ // TODO: Distinguishing parameters based on their names alone is not enough,
+ // the binder is also needed (at least for type lambdas).
+ new api.ParameterRef(tp.paramName.toString)
+ case tp: LazyRef =>
+ apiType(tp.ref)
+ case tp: TypeVar =>
+ apiType(tp.underlying)
+ case _ => {
+ ctx.warning(i"sbt-api: Unhandled type ${tp.getClass} : $tp")
+ Constants.emptyType
+ }
+ }
+ }
+
+ // TODO: Get rid of this method. See https://github.com/sbt/zinc/issues/101
+ def simpleType(tp: Type): api.SimpleType = apiType(tp) match {
+ case tp: api.SimpleType =>
+ tp
+ case _ =>
+ ctx.debuglog("sbt-api: Not a simple type: " + tp.show)
+ Constants.emptyType
+ }
+
+ def apiLazy(tp: => Type): api.Type = {
+ // TODO: The sbt api needs a convenient way to make a lazy type.
+ // For now, we repurpose Structure for this.
+ val apiTp = lzy(Array(apiType(tp)))
+ new api.Structure(apiTp, strict2lzy(Array()), strict2lzy(Array()))
+ }
+
+ def apiThis(sym: Symbol): api.Singleton = {
+ val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot)
+ .map(s => new api.Id(s.name.toString))
+ new api.Singleton(new api.Path(pathComponents.toArray.reverse ++ Array(Constants.thisPath)))
+ }
+
+ def apiTypeParameter(tparam: TypeParamInfo): api.TypeParameter =
+ apiTypeParameter(tparam.paramName.toString, tparam.paramVariance,
+ tparam.paramBounds.lo, tparam.paramBounds.hi)
+
+ def apiTypeParameter(name: String, variance: Int, lo: Type, hi: Type): api.TypeParameter =
+ new api.TypeParameter(name, Array(), Array(), apiVariance(variance),
+ apiType(lo), apiType(hi))
+
+ def apiVariance(v: Int): api.Variance = {
+ import api.Variance._
+ if (v < 0) Contravariant
+ else if (v > 0) Covariant
+ else Invariant
+ }
+
+ def apiAccess(sym: Symbol): api.Access = {
+ // Symbols which are private[foo] do not have the flag Private set,
+ // but their `privateWithin` exists, see `Parsers#ParserCommon#normalize`.
+ if (!sym.is(Protected | Private) && !sym.privateWithin.exists)
+ Constants.public
+ else if (sym.is(PrivateLocal))
+ Constants.privateLocal
+ else if (sym.is(ProtectedLocal))
+ Constants.protectedLocal
+ else {
+ val qualifier =
+ if (sym.privateWithin eq NoSymbol)
+ Constants.unqualified
+ else
+ new api.IdQualifier(sym.privateWithin.fullName.toString)
+ if (sym.is(Protected))
+ new api.Protected(qualifier)
+ else
+ new api.Private(qualifier)
+ }
+ }
+
+ def apiModifiers(sym: Symbol): api.Modifiers = {
+ val absOver = sym.is(AbsOverride)
+ val abs = sym.is(Abstract) || sym.is(Deferred) || absOver
+ val over = sym.is(Override) || absOver
+ new api.Modifiers(abs, over, sym.is(Final), sym.is(Sealed),
+ sym.is(Implicit), sym.is(Lazy), sym.is(Macro), sym.is(SuperAccessor))
+ }
+
+ // TODO: Support other annotations
+ def apiAnnotations(s: Symbol): List[api.Annotation] = {
+ val annots = new mutable.ListBuffer[api.Annotation]
+
+ if (Inliner.hasBodyToInline(s)) {
+ // FIXME: If the body of an inline method changes, all the reverse
+ // dependencies of this method need to be recompiled. sbt has no way
+ // of tracking method bodies, so as a hack we include the pretty-printed
+ // typed tree of the method as part of the signature we send to sbt.
+ // To do this properly we would need a way to hash trees and types in
+ // dotty itself.
+ val printTypesCtx = ctx.fresh.setSetting(ctx.settings.printtypes, true)
+ annots += marker(Inliner.bodyToInline(s).show(printTypesCtx).toString)
+ }
+
+ annots.toList
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
new file mode 100644
index 000000000..229e35360
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
@@ -0,0 +1,268 @@
+package dotty.tools.dotc
+package sbt
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._
+import Names._, NameOps._, StdNames._
+
+import scala.collection.{Set, mutable}
+
+import dotty.tools.io.{AbstractFile, Path, PlainFile, ZipArchive}
+import java.io.File
+
+import java.util.{Arrays, Comparator}
+
+import xsbti.DependencyContext
+
+/** This phase sends information on classes' dependencies to sbt via callbacks.
+ *
+ * This is used by sbt for incremental recompilation. Briefly, when a file
+ * changes sbt will recompile it, if its API has changed (determined by what
+ * `ExtractAPI` sent) then sbt will determine which reverse-dependencies
+ * (determined by what `ExtractDependencies` sent) of the API have to be
+ * recompiled depending on what changed.
+ *
+ * See the documentation of `ExtractDependenciesCollector`, `ExtractAPI`,
+ * `ExtractAPICollector` and
+ * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html for more
+ * information on how sbt incremental compilation works.
+ *
+ * The following flags affect this phase:
+ * -Yforce-sbt-phases
+ * -Ydump-sbt-inc
+ *
+ * @see ExtractAPI
+ */
+class ExtractDependencies extends Phase {
+ override def phaseName: String = "sbt-deps"
+
+ // This phase should be run directly after `Frontend`, if it is run after
+ // `PostTyper`, some dependencies will be lost because trees get simplified.
+ // See the scripted test `constants` for an example where this matters.
+ // TODO: Add a `Phase#runsBefore` method ?
+
+ override def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ val dumpInc = ctx.settings.YdumpSbtInc.value
+ val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value
+ if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) {
+ val sourceFile = unit.source.file.file
+ val extractDeps = new ExtractDependenciesCollector
+ extractDeps.traverse(unit.tpdTree)
+
+ if (dumpInc) {
+ val names = extractDeps.usedNames.map(_.toString).toArray[Object]
+ val deps = extractDeps.topLevelDependencies.map(_.toString).toArray[Object]
+ val inhDeps = extractDeps.topLevelInheritanceDependencies.map(_.toString).toArray[Object]
+ Arrays.sort(names)
+ Arrays.sort(deps)
+ Arrays.sort(inhDeps)
+
+ val pw = Path(sourceFile).changeExtension("inc").toFile.printWriter()
+ try {
+ pw.println(s"// usedNames: ${names.mkString(",")}")
+ pw.println(s"// topLevelDependencies: ${deps.mkString(",")}")
+ pw.println(s"// topLevelInheritanceDependencies: ${inhDeps.mkString(",")}")
+ } finally pw.close()
+ }
+
+ if (ctx.sbtCallback != null) {
+ extractDeps.usedNames.foreach(name =>
+ ctx.sbtCallback.usedName(sourceFile, name.toString))
+ extractDeps.topLevelDependencies.foreach(dep =>
+ recordDependency(sourceFile, dep, DependencyContext.DependencyByMemberRef))
+ extractDeps.topLevelInheritanceDependencies.foreach(dep =>
+ recordDependency(sourceFile, dep, DependencyContext.DependencyByInheritance))
+ }
+ }
+ }
+
+ /** Record that `currentSourceFile` depends on the file where `dep` was loaded from.
+ *
+ * @param currentSourceFile The source file of the current unit
+ * @param dep The dependency
+ * @param context Describes how `currentSourceFile` depends on `dep`
+ */
+ def recordDependency(currentSourceFile: File, dep: Symbol, context: DependencyContext)
+ (implicit ctx: Context) = {
+ val depFile = dep.associatedFile
+ if (depFile != null) {
+ if (depFile.path.endsWith(".class")) {
+ /** Transform `List(java, lang, String.class)` into `java.lang.String` */
+ def className(classSegments: List[String]) =
+ classSegments.mkString(".").stripSuffix(".class")
+ def binaryDependency(file: File, className: String) =
+ ctx.sbtCallback.binaryDependency(file, className, currentSourceFile, context)
+
+ depFile match {
+ case ze: ZipArchive#Entry =>
+ for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) {
+ val classSegments = Path(ze.path).segments
+ binaryDependency(zipFile, className(classSegments))
+ }
+ case pf: PlainFile =>
+ val packages = dep.ownersIterator
+ .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length
+ // We can recover the fully qualified name of a classfile from
+ // its path
+ val classSegments = pf.givenPath.segments.takeRight(packages + 1)
+ binaryDependency(pf.file, className(classSegments))
+ case _ =>
+ }
+ } else if (depFile.file != currentSourceFile) {
+ ctx.sbtCallback.sourceDependency(depFile.file, currentSourceFile, context)
+ }
+ }
+ }
+}
+
+/** Extract the dependency information of a compilation unit.
+ *
+ * To understand why we track the used names see the section "Name hashing
+ * algorithm" in http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html
+ * To understand why we need to track dependencies introduced by inheritance
+ * specially, see the subsection "Dependencies introduced by member reference and
+ * inheritance" in the "Name hashing algorithm" section.
+ */
+private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser {
+ import tpd._
+
+ private[this] val _usedNames = new mutable.HashSet[Name]
+ private[this] val _topLevelDependencies = new mutable.HashSet[Symbol]
+ private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[Symbol]
+
+ /** The names used in this class, this does not include names which are only
+ * defined and not referenced.
+ */
+ def usedNames: Set[Name] = _usedNames
+
+ /** The set of top-level classes that the compilation unit depends on
+ * because it refers to these classes or something defined in them.
+ * This is always a superset of `topLevelInheritanceDependencies` by definition.
+ */
+ def topLevelDependencies: Set[Symbol] = _topLevelDependencies
+
+ /** The set of top-level classes that the compilation unit extends or that
+ * contain a non-top-level class that the compilation unit extends.
+ */
+ def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies
+
+ private def addUsedName(name: Name) =
+ _usedNames += name
+
+ private def addDependency(sym: Symbol): Unit =
+ if (!ignoreDependency(sym)) {
+ val tlClass = sym.topLevelClass
+ if (tlClass.ne(NoSymbol)) // Some synthetic type aliases like AnyRef do not belong to any class
+ _topLevelDependencies += sym.topLevelClass
+ addUsedName(sym.name)
+ }
+
+ private def ignoreDependency(sym: Symbol) =
+ sym.eq(NoSymbol) ||
+ sym.isEffectiveRoot ||
+ sym.isAnonymousFunction ||
+ sym.isAnonymousClass
+
+ private def addInheritanceDependency(sym: Symbol): Unit =
+ _topLevelInheritanceDependencies += sym.topLevelClass
+
+ /** Traverse the tree of a source file and record the dependencies which
+ * can be retrieved using `topLevelDependencies`, `topLevelInheritanceDependencies`,
+ * and `usedNames`
+ */
+ override def traverse(tree: Tree)(implicit ctx: Context): Unit = {
+ tree match {
+ case Import(expr, selectors) =>
+ def lookupImported(name: Name) = expr.tpe.member(name).symbol
+ def addImported(name: Name) = {
+ // importing a name means importing both a term and a type (if they exist)
+ addDependency(lookupImported(name.toTermName))
+ addDependency(lookupImported(name.toTypeName))
+ }
+ selectors foreach {
+ case Ident(name) =>
+ addImported(name)
+ case Thicket(Ident(name) :: Ident(rename) :: Nil) =>
+ addImported(name)
+ if (rename ne nme.WILDCARD)
+ addUsedName(rename)
+ case _ =>
+ }
+ case Inlined(call, _, _) =>
+ // The inlined call is normally ignored by TreeTraverser but we need to
+ // record it as a dependency
+ traverse(call)
+ case t: TypeTree =>
+ usedTypeTraverser.traverse(t.tpe)
+ case ref: RefTree =>
+ addDependency(ref.symbol)
+ usedTypeTraverser.traverse(ref.tpe)
+ case t @ Template(_, parents, _, _) =>
+ t.parents.foreach(p => addInheritanceDependency(p.tpe.typeSymbol))
+ case _ =>
+ }
+ traverseChildren(tree)
+ }
+
+ /** Traverse a used type and record all the dependencies we need to keep track
+ * of for incremental recompilation.
+ *
+ * As a motivating example, given a type `T` defined as:
+ *
+ * type T >: L <: H
+ * type L <: A1
+ * type H <: B1
+ * class A1 extends A0
+ * class B1 extends B0
+ *
+ * We need to record a dependency on `T`, `L`, `H`, `A1`, `B1`. This is
+ * necessary because the API representation that `ExtractAPI` produces for
+ * `T` just refers to the strings "L" and "H", it does not contain their API
+ * representation. Therefore, the name hash of `T` does not change if for
+ * example the definition of `L` changes.
+ *
+ * We do not need to keep track of superclasses like `A0` and `B0` because
+ * the API representation of a class (and therefore its name hash) already
+ * contains all necessary information on superclasses.
+ *
+ * A natural question to ask is: Since traversing all referenced types to
+ * find all these names is costly, why not change the API representation
+ * produced by `ExtractAPI` to contain that information? This way the name
+ * hash of `T` would change if any of the types it depends on change, and we
+ * would only need to record a dependency on `T`. Unfortunately there is no
+ * simple answer to the question "what does T depend on?" because it depends
+ * on the prefix and `ExtractAPI` does not compute types as seen from every
+ * possible prefix, the documentation of `ExtractAPI` explains why.
+ *
+ * The tests in sbt `types-in-used-names-a`, `types-in-used-names-b`,
+ * `as-seen-from-a` and `as-seen-from-b` rely on this.
+ */
+ private object usedTypeTraverser extends TypeTraverser {
+ val seen = new mutable.HashSet[Type]
+ def traverse(tp: Type): Unit = if (!seen.contains(tp)) {
+ seen += tp
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (!sym.is(Package)) {
+ addDependency(sym)
+ if (!sym.isClass)
+ traverse(tp.info)
+ traverse(tp.prefix)
+ }
+ case tp: ThisType =>
+ traverse(tp.underlying)
+ case tp: ConstantType =>
+ traverse(tp.underlying)
+ case tp: MethodParam =>
+ traverse(tp.underlying)
+ case tp: PolyParam =>
+ traverse(tp.underlying)
+ case _ =>
+ traverseChildren(tp)
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala
new file mode 100644
index 000000000..0e6b19867
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala
@@ -0,0 +1,156 @@
+// This file is copied straight from
+// https://github.com/sbt/sbt/blob/0.13/compile/api/src/main/scala/xsbt/api/ShowAPI.scala
+// It is convenient to be able to pretty-print the API from Dotty itself to test
+// the sbt phase without having to run sbt.
+
+/* sbt -- Simple Build Tool
+ * Copyright 2010 Mark Harrah
+ */
+package dotty.tools.dotc
+package sbt
+
+import xsbti.api._
+
+import scala.util.Try
+
+object DefaultShowAPI {
+ private lazy val defaultNesting = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.depth").get) } getOrElse 2
+
+ def apply(d: Definition) = ShowAPI.showDefinition(d)(defaultNesting)
+ def apply(d: Type) = ShowAPI.showType(d)(defaultNesting)
+ def apply(a: SourceAPI) = ShowAPI.showApi(a)(defaultNesting)
+}
+
+object ShowAPI {
+ private lazy val numDecls = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.decls").get) } getOrElse 0
+
+ private def truncateDecls(decls: Array[Definition]): Array[Definition] = if (numDecls <= 0) decls else decls.take(numDecls)
+ private def lines(ls: Seq[String]): String = ls.mkString("\n", "\n", "\n")
+
+ def showApi(a: SourceAPI)(implicit nesting: Int) =
+ a.packages.map(pkg => "package " + pkg.name).mkString("\n") + lines(truncateDecls(a.definitions).map(showDefinition))
+
+ def showDefinition(d: Definition)(implicit nesting: Int): String = d match {
+ case v: Val => showMonoDef(v, "val") + ": " + showType(v.tpe)
+ case v: Var => showMonoDef(v, "var") + ": " + showType(v.tpe)
+ case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters) + ": " + showType(d.returnType)
+ case ta: TypeAlias => showPolyDef(ta, "type") + " = " + showType(ta.tpe)
+ case td: TypeDeclaration => showPolyDef(td, "type") + showBounds(td.lowerBound, td.upperBound)
+ case cl: ClassLike => showPolyDef(cl, showDefinitionType(cl.definitionType)) + " extends " + showTemplate(cl)
+ }
+
+ private def showTemplate(cl: ClassLike)(implicit nesting: Int) =
+ if (nesting <= 0) "<nesting level reached>"
+ else {
+ val showSelf = if (cl.selfType.isInstanceOf[EmptyType]) "" else " self: " + showNestedType(cl.selfType) + " =>"
+
+ cl.structure.parents.map(showNestedType).mkString("", " with ", " {") + showSelf +
+ lines(truncateDecls(cl.structure.inherited).map(d => "^inherited^ " + showNestedDefinition(d))) +
+ lines(truncateDecls(cl.structure.declared).map(showNestedDefinition)) +
+ "}"
+ }
+
+ def showType(t: Type)(implicit nesting: Int): String = t match {
+ case st: Projection => showType(st.prefix) + "#" + st.id
+ case st: ParameterRef => "<" + st.id + ">"
+ case st: Singleton => showPath(st.path)
+ case st: EmptyType => "<empty>"
+ case p: Parameterized => showType(p.baseType) + p.typeArguments.map(showType).mkString("[", ", ", "]")
+ case c: Constant => showType(c.baseType) + "(" + c.value + ")"
+ case a: Annotated => showAnnotations(a.annotations) + " " + showType(a.baseType)
+ case s: Structure =>
+ s.parents.map(showType).mkString(" with ") + (
+ if (nesting <= 0) "{ <nesting level reached> }"
+ else truncateDecls(s.declared).map(showNestedDefinition).mkString(" {", "\n", "}"))
+ case e: Existential =>
+ showType(e.baseType) + (
+ if (nesting <= 0) " forSome { <nesting level reached> }"
+ else e.clause.map(t => "type " + showNestedTypeParameter(t)).mkString(" forSome { ", "; ", " }"))
+ case p: Polymorphic => showType(p.baseType) + (
+ if (nesting <= 0) " [ <nesting level reached> ]"
+ else showNestedTypeParameters(p.parameters))
+ }
+
+ private def showPath(p: Path): String = p.components.map(showPathComponent).mkString(".")
+ private def showPathComponent(pc: PathComponent) = pc match {
+ case s: Super => "super[" + showPath(s.qualifier) + "]"
+ case _: This => "this"
+ case i: Id => i.id
+ }
+
+ private def space(s: String) = if (s.isEmpty) s else s + " "
+ private def showMonoDef(d: Definition, label: String)(implicit nesting: Int): String =
+ space(showAnnotations(d.annotations)) + space(showAccess(d.access)) + space(showModifiers(d.modifiers)) + space(label) + d.name
+
+ private def showPolyDef(d: ParameterizedDefinition, label: String)(implicit nesting: Int): String =
+ showMonoDef(d, label) + showTypeParameters(d.typeParameters)
+
+ private def showTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int): String =
+ if (tps.isEmpty) ""
+ else tps.map(showTypeParameter).mkString("[", ", ", "]")
+
+ private def showTypeParameter(tp: TypeParameter)(implicit nesting: Int): String =
+ showAnnotations(tp.annotations) + " " + showVariance(tp.variance) + tp.id + showTypeParameters(tp.typeParameters) + " " + showBounds(tp.lowerBound, tp.upperBound)
+
+ private def showAnnotations(as: Seq[Annotation])(implicit nesting: Int) = as.map(showAnnotation).mkString(" ")
+ private def showAnnotation(a: Annotation)(implicit nesting: Int) =
+ "@" + showType(a.base) + (
+ if (a.arguments.isEmpty) ""
+ else a.arguments.map(a => a.name + " = " + a.value).mkString("(", ", ", ")")
+ )
+
+ private def showBounds(lower: Type, upper: Type)(implicit nesting: Int): String = ">: " + showType(lower) + " <: " + showType(upper)
+
+ private def showValueParams(ps: Seq[ParameterList])(implicit nesting: Int): String =
+ ps.map(pl =>
+ pl.parameters.map(mp =>
+ mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "")
+ ).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")")
+ ).mkString("")
+
+ private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match {
+ case ParameterModifier.Plain => base
+ case ParameterModifier.Repeated => base + "*"
+ case ParameterModifier.ByName => "=> " + base
+ }
+
+ private def showDefinitionType(d: DefinitionType) = d match {
+ case DefinitionType.Trait => "trait"
+ case DefinitionType.ClassDef => "class"
+ case DefinitionType.Module => "object"
+ case DefinitionType.PackageModule => "package object"
+ }
+
+ private def showAccess(a: Access) = a match {
+ case p: Public => ""
+ case p: Protected => "protected" + showQualifier(p.qualifier)
+ case p: Private => "private" + showQualifier(p.qualifier)
+ }
+
+ private def showQualifier(q: Qualifier) = q match {
+ case _: Unqualified => ""
+ case _: ThisQualifier => "[this]"
+ case i: IdQualifier => "[" + i.value + "]"
+ }
+
+ private def showModifiers(m: Modifiers) = List(
+ (m.isOverride, "override"),
+ (m.isFinal, "final"),
+ (m.isSealed, "sealed"),
+ (m.isImplicit, "implicit"),
+ (m.isAbstract, "abstract"),
+ (m.isLazy, "lazy")
+ ).collect { case (true, mod) => mod }.mkString(" ")
+
+ private def showVariance(v: Variance) = v match {
+ case Variance.Invariant => ""
+ case Variance.Covariant => "+"
+ case Variance.Contravariant => "-"
+ }
+
+ // limit nesting to prevent cycles and generally keep output from getting humongous
+ private def showNestedType(tp: Type)(implicit nesting: Int) = showType(tp)(nesting - 1)
+ private def showNestedTypeParameter(tp: TypeParameter)(implicit nesting: Int) = showTypeParameter(tp)(nesting - 1)
+ private def showNestedTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int) = showTypeParameters(tps)(nesting - 1)
+ private def showNestedDefinition(d: Definition)(implicit nesting: Int) = showDefinition(d)(nesting - 1)
+}
diff --git a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala
new file mode 100644
index 000000000..e377de6da
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala
@@ -0,0 +1,61 @@
+package dotty.tools.dotc
+package sbt
+
+import scala.annotation.tailrec
+import scala.collection.mutable.ListBuffer
+import xsbti.api
+
+/** Create and hold thunks. A thunk is a (potentially) unevaluated value
+ * that may be evaluated once.
+ */
+private[sbt] trait ThunkHolder {
+ private[this] val thunks = new ListBuffer[api.Lazy[_]]
+
+ /** Force all unevaluated thunks to prevent space leaks. */
+ @tailrec protected final def forceThunks(): Unit = if (!thunks.isEmpty) {
+ val toForce = thunks.toList
+ thunks.clear()
+ toForce.foreach(_.get())
+ // Forcing thunks may create new thunks
+ forceThunks()
+ }
+
+ /** Store the by-name parameter `t` in a `Lazy` container without evaluating it.
+ * It will be forced by the next call to `forceThunks()`
+ */
+ def lzy[T <: AnyRef](t: => T): api.Lazy[T] = {
+ val l = SafeLazy(() => t)
+ thunks += l
+ l
+ }
+
+ /** Store the parameter `t` in a `Lazy` container; since `t` is not by-name, there
+ * is nothing to force.
+ *
+ * TODO: Get rid of this method. It is only needed because some xsbti.api classes
+ * take lazy arguments when they could be strict, but this can be fixed in sbt,
+ * see https://github.com/sbt/zinc/issues/114
+ */
+ def strict2lzy[T <: AnyRef](t: T): api.Lazy[T] =
+ SafeLazy.strict(t)
+}
+
+// TODO: Use xsbti.SafeLazy once https://github.com/sbt/zinc/issues/113 is fixed
+private object SafeLazy {
+ def apply[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] =
+ new Impl(eval)
+
+ def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] =
+ new Strict(value)
+
+ private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] {
+ private[this] lazy val _t = {
+ val t = eval()
+ eval = null // clear the reference, ensuring the only memory we hold onto is the result
+ t
+ }
+ def get: T = _t
+ }
+
+ private[this] final class Strict[T <: AnyRef](val get: T) extends xsbti.api.Lazy[T] with java.io.Serializable
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
new file mode 100644
index 000000000..74213d332
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
@@ -0,0 +1,59 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import Contexts.Context
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import util.Positions._
+import Names._
+
+import collection.mutable
+import ResolveSuper._
+
+import scala.collection.immutable.::
+
+
+/** This phase rewrites calls to array constructors to newArray method in Dotty.runtime.Arrays module.
+ *
+ * It assumes that generic arrays have already been handled by the typer (see Applications.convertNewGenericArray).
+ * Additionally it optimizes calls to scala.Array.ofDim functions by replacing them with calls to newArray with specific dimensions
+ */
+class ArrayConstructors extends MiniPhaseTransform { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "arrayConstructors"
+
+ override def transformApply(tree: tpd.Apply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ def rewrite(elemType: Type, dims: List[Tree]) =
+ tpd.newArray(elemType, tree.tpe, tree.pos, JavaSeqLiteral(dims, TypeTree(defn.IntClass.typeRef)))
+
+ if (tree.fun.symbol eq defn.ArrayConstructor) {
+ val TypeApply(tycon, targ :: Nil) = tree.fun
+ rewrite(targ.tpe, tree.args)
+ } else if ((tree.fun.symbol.maybeOwner eq defn.ArrayModule) && (tree.fun.symbol.name eq nme.ofDim) && !tree.tpe.isInstanceOf[MethodicType]) {
+ val Apply(Apply(TypeApply(_, List(tp)), _), _) = tree
+ val cs = tp.tpe.widen.classSymbol
+ tree.fun match {
+ case Apply(TypeApply(t: Ident, targ), dims)
+ if !TypeErasure.isUnboundedGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) =>
+ rewrite(targ.head.tpe, dims)
+ case Apply(TypeApply(t: Select, targ), dims)
+ if !TypeErasure.isUnboundedGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) =>
+ Block(t.qualifier :: Nil, rewrite(targ.head.tpe, dims))
+ case _ => tree
+ }
+
+ } else tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/AugmentScala2Traits.scala b/compiler/src/dotty/tools/dotc/transform/AugmentScala2Traits.scala
new file mode 100644
index 000000000..9c01aaa9a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/AugmentScala2Traits.scala
@@ -0,0 +1,101 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import Contexts.Context
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import Annotations._
+import StdNames._
+import NameOps._
+import ast.Trees._
+
+/** This phase augments Scala2 traits with implementation classes and with additional members
+ * needed for mixin composition.
+ * These symbols would have been added between Unpickling and Mixin in the Scala2 pipeline.
+ * Specifically, it adds
+ *
+ * - an implementation class which defines a trait constructor and trait method implementations
+ * - trait setters for vals defined in traits
+ *
+ * Furthermore, it expands the names of all private getters and setters as well as super accessors in the trait and makes
+ * them not-private.
+ */
+class AugmentScala2Traits extends MiniPhaseTransform with IdentityDenotTransformer with FullParameterization { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "augmentScala2Traits"
+
+ override def rewiredTarget(referenced: Symbol, derived: Symbol)(implicit ctx: Context) = NoSymbol
+
+ override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo) = {
+ val cls = impl.symbol.owner.asClass
+ for (mixin <- cls.mixins)
+ if (mixin.is(Scala2x))
+ augmentScala2Trait(mixin, cls)
+ impl
+ }
+
+ private def augmentScala2Trait(mixin: ClassSymbol, cls: ClassSymbol)(implicit ctx: Context): Unit = {
+ if (mixin.implClass.is(Scala2x)) () // nothing to do, mixin was already augmented
+ else {
+ //println(i"creating new implclass for $mixin ${mixin.implClass}")
+ val ops = new MixinOps(cls, thisTransform)
+ import ops._
+
+ val implClass = ctx.newCompleteClassSymbol(
+ owner = mixin.owner,
+ name = mixin.name.implClassName,
+ flags = Abstract | Scala2x,
+ parents = defn.ObjectType :: Nil,
+ assocFile = mixin.assocFile).enteredAfter(thisTransform)
+
+ def implMethod(meth: TermSymbol): Symbol = {
+ val mold =
+ if (meth.isConstructor)
+ meth.copySymDenotation(
+ name = nme.TRAIT_CONSTRUCTOR,
+ info = MethodType(Nil, defn.UnitType))
+ else meth.ensureNotPrivate
+ meth.copy(
+ owner = implClass,
+ name = mold.name.asTermName,
+ flags = Method | JavaStatic | mold.flags & ExpandedName,
+ info = fullyParameterizedType(mold.info, mixin))
+ }
+
+ def traitSetter(getter: TermSymbol) =
+ getter.copy(
+ name = getter.ensureNotPrivate.name
+ .expandedName(getter.owner, nme.TRAIT_SETTER_SEPARATOR)
+ .asTermName.setterName,
+ flags = Method | Accessor | ExpandedName,
+ info = MethodType(getter.info.resultType :: Nil, defn.UnitType))
+
+ for (sym <- mixin.info.decls) {
+ if (needsForwarder(sym) || sym.isConstructor || sym.isGetter && sym.is(Lazy) || sym.is(Method, butNot = Deferred))
+ implClass.enter(implMethod(sym.asTerm))
+ if (sym.isGetter)
+ if (sym.is(Lazy)) {
+ if (!sym.hasAnnotation(defn.VolatileAnnot))
+ sym.addAnnotation(Annotation(defn.VolatileAnnot, Nil))
+ }
+ else if (!sym.is(Deferred) && !sym.setter.exists &&
+ !sym.info.resultType.isInstanceOf[ConstantType])
+ traitSetter(sym.asTerm).enteredAfter(thisTransform)
+ if ((sym.is(PrivateAccessor, butNot = ExpandedName) &&
+ (sym.isGetter || sym.isSetter)) // strangely, Scala 2 fields are also methods that have Accessor set.
+ || sym.is(SuperAccessor)) // scala2 superaccessors are pickled as private, but are compiled as public expanded
+ sym.ensureNotPrivate.installAfter(thisTransform)
+ }
+ ctx.log(i"Scala2x trait decls of $mixin = ${mixin.info.decls.toList.map(_.showDcl)}%\n %")
+ ctx.log(i"Scala2x impl decls of $mixin = ${implClass.info.decls.toList.map(_.showDcl)}%\n %")
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
new file mode 100644
index 000000000..cd05589c3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
@@ -0,0 +1,149 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import core.SymDenotations._
+import core.StdNames.nme
+import core.Names._
+import core.NameOps._
+import ast.Trees._
+import SymUtils._
+import collection.{ mutable, immutable }
+import collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet }
+
+class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisTransform =>
+ import ast.tpd._
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "capturedVars"
+ val treeTransform = new Transform(Set())
+
+ private class RefInfo(implicit ctx: Context) {
+ /** The classes for which a Ref type exists. */
+ val refClassKeys: collection.Set[Symbol] =
+ defn.ScalaNumericValueClasses() + defn.BooleanClass + defn.ObjectClass
+
+ val refClass: Map[Symbol, Symbol] =
+ refClassKeys.map(rc => rc -> ctx.requiredClass(s"scala.runtime.${rc.name}Ref")).toMap
+
+ val volatileRefClass: Map[Symbol, Symbol] =
+ refClassKeys.map(rc => rc -> ctx.requiredClass(s"scala.runtime.Volatile${rc.name}Ref")).toMap
+
+ val boxedRefClasses: collection.Set[Symbol] =
+ refClassKeys.flatMap(k => Set(refClass(k), volatileRefClass(k)))
+ }
+
+ class Transform(captured: collection.Set[Symbol]) extends TreeTransform {
+ def phase = thisTransform
+
+ private var myRefInfo: RefInfo = null
+ private def refInfo(implicit ctx: Context) = {
+ if (myRefInfo == null) myRefInfo = new RefInfo()
+ myRefInfo
+ }
+
+ private class CollectCaptured(implicit ctx: Context) extends EnclosingMethodTraverser {
+ private val captured = mutable.HashSet[Symbol]()
+ def traverse(enclMeth: Symbol, tree: Tree)(implicit ctx: Context) = tree match {
+ case id: Ident =>
+ val sym = id.symbol
+ if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm && sym.enclosingMethod != enclMeth) {
+ ctx.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
+ captured += sym
+ }
+ case _ =>
+ foldOver(enclMeth, tree)
+ }
+ def runOver(tree: Tree): collection.Set[Symbol] = {
+ apply(NoSymbol, tree)
+ captured
+ }
+ }
+
+ override def prepareForUnit(tree: Tree)(implicit ctx: Context) = {
+ val captured = (new CollectCaptured)(ctx.withPhase(thisTransform))
+ .runOver(ctx.compilationUnit.tpdTree)
+ new Transform(captured)
+ }
+
+ /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`,
+ * depending on whether the reference should be @volatile
+ */
+ def refClass(cls: Symbol, isVolatile: Boolean)(implicit ctx: Context): Symbol = {
+ val refMap = if (isVolatile) refInfo.volatileRefClass else refInfo.refClass
+ if (cls.isClass) {
+ refMap.getOrElse(cls, refMap(defn.ObjectClass))
+ }
+ else refMap(defn.ObjectClass)
+ }
+
+ override def prepareForValDef(vdef: ValDef)(implicit ctx: Context) = {
+ val sym = vdef.symbol
+ if (captured contains sym) {
+ val newd = sym.denot(ctx.withPhase(thisTransform)).copySymDenotation(
+ info = refClass(sym.info.classSymbol, sym.hasAnnotation(defn.VolatileAnnot)).typeRef,
+ initFlags = sym.flags &~ Mutable)
+ newd.removeAnnotation(defn.VolatileAnnot)
+ newd.installAfter(thisTransform)
+ }
+ this
+ }
+
+ override def transformValDef(vdef: ValDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val vble = vdef.symbol
+ if (captured contains vble) {
+ def boxMethod(name: TermName): Tree =
+ ref(vble.info.classSymbol.companionModule.info.member(name).symbol)
+ cpy.ValDef(vdef)(
+ rhs = vdef.rhs match {
+ case EmptyTree => boxMethod(nme.zero).appliedToNone.withPos(vdef.pos)
+ case arg => boxMethod(nme.create).appliedTo(arg)
+ },
+ tpt = TypeTree(vble.info).withPos(vdef.tpt.pos))
+ } else vdef
+ }
+
+ override def transformIdent(id: Ident)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val vble = id.symbol
+ if (captured(vble))
+ (id select nme.elem).ensureConforms(vble.denot(ctx.withPhase(thisTransform)).info)
+ else id
+ }
+
+ /** If assignment is to a boxed ref type, e.g.
+ *
+ * intRef.elem = expr
+ *
+ * rewrite using a temporary var to
+ *
+ * val ev$n = expr
+ * intRef.elem = ev$n
+ *
+ * That way, we avoid the problem that `expr` might contain a `try` that would
+ * run on a non-empty stack (which is illegal under JVM rules). Note that LiftTry
+ * has already run before, so such `try`s would not be eliminated.
+ *
+ * Also: If the ref type lhs is followed by a cast (can be an artifact of nested translation),
+ * drop the cast.
+ */
+ override def transformAssign(tree: Assign)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ def recur(lhs: Tree): Tree = lhs match {
+ case TypeApply(Select(qual, nme.asInstanceOf_), _) =>
+ val Select(_, nme.elem) = qual
+ recur(qual)
+ case Select(_, nme.elem) if refInfo.boxedRefClasses.contains(lhs.symbol.maybeOwner) =>
+ val tempDef = transformFollowing(SyntheticValDef(ctx.freshName("ev$").toTermName, tree.rhs))
+ transformFollowing(Block(tempDef :: Nil, cpy.Assign(tree)(lhs, ref(tempDef.symbol))))
+ case _ =>
+ tree
+ }
+ recur(tree.lhs)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
new file mode 100644
index 000000000..c9eefb22f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
@@ -0,0 +1,95 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Names._
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, MiniPhaseTransform, TreeTransformer}
+import ast.Trees._
+import Flags._
+import Types._
+import Constants.Constant
+import Contexts.Context
+import Symbols._
+import SymDenotations._
+import Decorators._
+import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
+import dotty.tools.dotc.core.Denotations.SingleDenotation
+import scala.collection.mutable
+import DenotTransformers._
+import typer.Checking
+import Names.Name
+import NameOps._
+import StdNames._
+
+
+/** A no-op transform that checks whether the compiled sources are re-entrant.
+ * If -Ycheck:reentrant is set, the phase makes sure that there are no variables
+ * that are accessible from a global object. It excludes from checking paths that
+ * are labeled with one of the annotations
+ *
+ * @sharable Indicating a class or val can be safely shared
+ * @unshared Indicating an object will not be accessed from multiple threads
+ *
+ * Currently the analysis is only intended to check the dotty compiler itself. To make
+ * it generally useful we'd need to add at least the following:
+ *
+ * - Handle polymorphic instantiation: We might instantiate a generic class
+ * with a type that contains vars. If the class contains fields of the generic
+ * type, this may constitute a path to a shared var, which currently goes undetected.
+ * - Handle arrays: Array elements are currently ignored because they are often used
+ * in an immutable way anyway. To do better, it would be helpful to have a type
+ * for immutable array.
+ */
+class CheckReentrant extends MiniPhaseTransform { thisTransformer =>
+ import ast.tpd._
+
+ override def phaseName = "checkReentrant"
+
+ private var shared: Set[Symbol] = Set()
+ private var seen: Set[ClassSymbol] = Set()
+ private var indent: Int = 0
+
+ private val sharableAnnot = new CtxLazy(implicit ctx =>
+ ctx.requiredClass("dotty.tools.sharable"))
+ private val unsharedAnnot = new CtxLazy(implicit ctx =>
+ ctx.requiredClass("dotty.tools.unshared"))
+
+ def isIgnored(sym: Symbol)(implicit ctx: Context) =
+ sym.hasAnnotation(sharableAnnot()) ||
+ sym.hasAnnotation(unsharedAnnot())
+
+ def scanning(sym: Symbol)(op: => Unit)(implicit ctx: Context): Unit = {
+ ctx.log(i"${" " * indent}scanning $sym")
+ indent += 1
+ try op
+ finally indent -= 1
+ }
+
+ def addVars(cls: ClassSymbol)(implicit ctx: Context): Unit = {
+ if (!seen.contains(cls) && !isIgnored(cls)) {
+ seen += cls
+ scanning(cls) {
+ for (sym <- cls.classInfo.decls)
+ if (sym.isTerm && !sym.isSetter && !isIgnored(sym))
+ if (sym.is(Mutable)) {
+ ctx.error(
+ i"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info}
+ | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""")
+ shared += sym
+ } else if (!sym.is(Method) || sym.is(Accessor | ParamAccessor)) {
+ scanning(sym) {
+ sym.info.widenExpr.classSymbols.foreach(addVars)
+ }
+ }
+ for (parent <- cls.classInfo.classParents)
+ addVars(parent.symbol.asClass)
+ }
+ }
+ }
+
+ override def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (ctx.settings.YcheckReentrant.value && tree.symbol.owner.isStaticOwner)
+ addVars(tree.symbol.owner.asClass)
+ tree
+ }
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
new file mode 100644
index 000000000..937a4f1cc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
@@ -0,0 +1,96 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Names._
+import StdNames.nme
+import Types._
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, MiniPhaseTransform, TreeTransformer}
+import ast.Trees._
+import Flags._
+import Contexts.Context
+import Symbols._
+import Constants._
+import Denotations._, SymDenotations._
+import Decorators.StringInterpolators
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
+import scala.collection.mutable
+import DenotTransformers._
+import Names.Name
+import NameOps._
+import Decorators._
+import TypeUtils._
+
+/** A transformer that checks that the requirements of static fields/methods are implemented:
+ * 1. Only objects can have members annotated with `@static`
+ * 2. The fields annotated with `@static` should precede any non-`@static` fields.
+ * This ensures that we do not introduce surprises for users in initialization order.
+ * 3. If a member `foo` of an `object C` is annotated with `@static`,
+ * the companion class `C` is not allowed to define term members with name `foo`.
+ * 4. If a member `foo` of an `object C` is annotated with `@static`, the companion class `C`
+ * is not allowed to inherit classes that define a term member with name `foo`.
+ * 5. Only `@static` methods and vals are supported in companions of traits.
+ * Java8 supports those, but not vars, and JavaScript does not have interfaces at all.
+ * 6. `@static` Lazy vals are currently unsupported.
+ */
+class CheckStatic extends MiniPhaseTransform { thisTransformer =>
+ import ast.tpd._
+
+ override def phaseName = "checkStatic"
+
+
+ def check(tree: tpd.DefTree)(implicit ctx: Context) = {
+
+ }
+
+ override def transformTemplate(tree: tpd.Template)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ val defns = tree.body.collect{case t: ValOrDefDef => t}
+ var hadNonStaticField = false
+ for(defn <- defns) {
+ if (defn.symbol.hasAnnotation(ctx.definitions.ScalaStaticAnnot)) {
+ if(!ctx.owner.is(Module)) {
+ ctx.error("@static fields are only allowed inside objects", defn.pos)
+ }
+
+ if (defn.isInstanceOf[ValDef] && hadNonStaticField) {
+ ctx.error("@static fields should preceed non-static ones", defn.pos)
+ }
+
+ val companion = ctx.owner.companionClass
+ def clashes = companion.asClass.membersNamed(defn.name)
+
+ if (!companion.exists) {
+ ctx.error("object that contains @static members should have companion class", defn.pos)
+ } else if (clashes.exists) {
+ ctx.error("companion classes cannot define members with same name as @static member", defn.pos)
+ } else if (defn.symbol.is(Flags.Mutable) && companion.is(Flags.Trait)) {
+ ctx.error("Companions of traits cannot define mutable @static fields", defn.pos)
+ } else if (defn.symbol.is(Flags.Lazy)) {
+ ctx.error("Lazy @static fields are not supported", defn.pos)
+ } else if (defn.symbol.allOverriddenSymbols.nonEmpty) {
+ ctx.error("@static members cannot override or implement non-static ones", defn.pos)
+ }
+ } else hadNonStaticField = hadNonStaticField || defn.isInstanceOf[ValDef]
+
+ }
+ tree
+ }
+
+ override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ if (tree.symbol.hasAnnotation(defn.ScalaStaticAnnot)) {
+ val symbolWhitelist = tree.symbol.ownersIterator.flatMap(x => if (x.is(Flags.Module)) List(x, x.companionModule) else List(x)).toSet
+ def isSafeQual(t: Tree): Boolean = { // follow the desugared paths created by typer
+ t match {
+ case t: This => true
+ case t: Select => isSafeQual(t.qualifier) && symbolWhitelist.contains(t.symbol)
+ case t: Ident => symbolWhitelist.contains(t.symbol)
+ case t: Block => t.stats.forall(tpd.isPureExpr) && isSafeQual(t.expr)
+ }
+ }
+ if (isSafeQual(tree.qualifier))
+ ref(tree.symbol)
+ else tree
+ } else tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ClassOf.scala b/compiler/src/dotty/tools/dotc/transform/ClassOf.scala
new file mode 100644
index 000000000..e7b6977c7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ClassOf.scala
@@ -0,0 +1,30 @@
+package dotty.tools.dotc
+package transform
+
+import ast.tpd
+import core.Constants.Constant
+import core.Contexts.Context
+import core.StdNames.nme
+import core.Symbols.{defn,TermSymbol}
+import core.TypeErasure
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo, TreeTransform}
+
+/** Rewrite `classOf` calls as follows:
+ *
+ * For every primitive class C whose boxed class is called B:
+ * classOf[C] -> B.TYPE
+ * For every non-primitive class D:
+ * classOf[D] -> Literal(Constant(erasure(D)))
+ */
+class ClassOf extends MiniPhaseTransform {
+ import tpd._
+
+ override def phaseName: String = "classOf"
+
+ override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ if (tree.symbol eq defn.Predef_classOf) {
+ val targ = tree.args.head.tpe
+ clsOf(targ).ensureConforms(tree.tpe).withPos(tree.pos)
+ }
+ else tree
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
new file mode 100644
index 000000000..714255962
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -0,0 +1,116 @@
+package dotty.tools.dotc.transform
+
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransform, TreeTransformer, MiniPhaseTransform}
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import scala.collection.mutable.ListBuffer
+import dotty.tools.dotc.core.{Scopes, Flags}
+import dotty.tools.dotc.core.Symbols.NoSymbol
+import scala.annotation.tailrec
+import dotty.tools.dotc.core._
+import Symbols._
+import scala.Some
+import dotty.tools.dotc.transform.TreeTransforms.{NXTransformations, TransformerInfo, TreeTransform, TreeTransformer}
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import scala.collection.mutable
+import dotty.tools.dotc.core.Names.Name
+import NameOps._
+import Types._
+import scala.collection.SortedSet
+import Decorators._
+import StdNames._
+import dotty.tools.dotc.util.Positions.Position
+import dotty.tools.dotc.config.JavaPlatform
+
+/** Collects the symbols of all valid Java entry points (runnable `main`
+ *  methods) seen in the compilation unit, so GenBCode can emit the
+ *  corresponding runnable classes without another pass over all ClassDefs.
+ */
+class CollectEntryPoints extends MiniPhaseTransform {
+
+  /** perform context-dependant initialization */
+  override def prepareForUnit(tree: tpd.Tree)(implicit ctx: Context) = {
+    entryPoints = collection.immutable.TreeSet.empty[Symbol](new SymbolOrdering())
+    assert(ctx.platform.isInstanceOf[JavaPlatform], "Java platform specific phase")
+    this
+  }
+
+  // Entry points found so far; (re)initialized per unit in prepareForUnit.
+  private var entryPoints: Set[Symbol] = _
+
+  def getEntryPoints = entryPoints.toList
+
+  override def phaseName: String = "collectEntryPoints"
+
+  override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    if (tree.symbol.owner.isClass && isJavaEntryPoint(tree.symbol)) {
+      // collecting symbols for entry points here (as opposed to GenBCode where they are used)
+      // has the advantage of saving an additional pass over all ClassDefs.
+      entryPoints += tree.symbol
+    }
+    tree
+  }
+
+  /** Is `sym` a method the JVM can use as a program entry point, i.e. a
+   *  module's `main` for which a static forwarder with the exact signature
+   *  `(Array[String])Unit` can be generated?
+   *  Warns about main-like methods that do not qualify.
+   */
+  def isJavaEntryPoint(sym: Symbol)(implicit ctx: Context): Boolean = {
+    def fail(msg: String, pos: Position = sym.pos) = {
+      ctx.warning(sym.name +
+        s" has a main method with parameter type Array[String], but ${sym.fullName} will not be a runnable program.\n Reason: $msg",
+        sourcePos(pos) // was sourcePos(sym.pos): honor the position supplied by the caller
+        // TODO: make this next claim true, if possible
+        //   by generating valid main methods as static in module classes
+        //   not sure what the jvm allows here
+        // + " You can still run the program by calling it as " + javaName(sym) + " instead."
+      )
+      false
+    }
+    def failNoForwarder(msg: String) = {
+      fail(s"$msg, which means no static forwarder can be generated.\n")
+    }
+    // All non-private `main` overloads of the module; empty if not a module.
+    val possibles = if (sym.flags is Flags.Module) (sym.info nonPrivateMember nme.main).alternatives else Nil
+    val hasApproximate = possibles exists {
+      m =>
+        m.info match {
+          case MethodType(_, p :: Nil) =>
+            p.typeSymbol == defn.ArrayClass
+          case _ => false
+        }
+    }
+    // Exact check, run at erasure phase (see call below): succeed, or warn why
+    // a main-looking method cannot be a JVM entry point.
+    def precise(implicit ctx: Context) = {
+      val companion = sym.companionClass //sym.asClass.linkedClassOfClass
+      val javaPlatform = ctx.platform.asInstanceOf[JavaPlatform]
+      if (javaPlatform.hasJavaMainMethod(companion))
+        failNoForwarder("companion contains its own main method")
+      else if (companion.exists && companion.info.member(nme.main).exists)
+        // this is only because forwarders aren't smart enough yet
+        failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+      else if (companion.flags is Flags.Trait)
+        failNoForwarder("companion is a trait")
+      // Now either succeed, or issue some additional warnings for things which look like
+      // attempts to be java main methods.
+      else (possibles exists (x => javaPlatform.isJavaMainMethod(x.symbol))) || {
+        possibles exists {
+          m =>
+            m.symbol.info match {
+              case t: PolyType =>
+                fail("main methods cannot be generic.", m.symbol.pos) // report at the offending method, like the cases below
+              case t @ MethodType(paramNames, paramTypes) =>
+                if (t.resultType :: paramTypes exists (_.typeSymbol.isAbstractType))
+                  fail("main methods cannot refer to type parameters or abstract types.", m.symbol.pos)
+                else
+                  javaPlatform.isJavaMainMethod(m.symbol) || fail("main method must have exact signature (Array[String])Unit", m.symbol.pos)
+              case tp =>
+                fail(s"don't know what this is: $tp", m.symbol.pos)
+            }
+        }
+      }
+    }
+
+    // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+    // `precise` runs at (i.e. before the effect of) erasure so we can still identify generic mains.
+    hasApproximate && precise(ctx.withPhase(ctx.erasurePhase))
+  }
+}
+
+/** Total order on symbols: lexicographic comparison of fully-qualified names. */
+class SymbolOrdering(implicit ctx: Context) extends Ordering[Symbol] {
+  override def compare(x: Symbol, y: Symbol): Int =
+    x.fullName.toString.compareTo(y.fullName.toString)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala
new file mode 100644
index 000000000..db850e944
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala
@@ -0,0 +1,261 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import dotty.tools.dotc.ast.tpd._
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.StdNames._
+import Phases._
+import ast._
+import Trees._
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import util.Positions._
+import Constants.Constant
+import collection.mutable
+
+/** This transform
+ *   - moves initializers from body to constructor.
+ *   - makes all supercalls explicit
+ *   - also moves private fields that are accessed only from constructor
+ *     into the constructor if possible.
+ */
+class Constructors extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+  import tpd._
+
+  override def phaseName: String = "constructors"
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Memoize])
+
+  // Collect all private parameter accessors and value definitions that need
+  // to be retained. There are several reasons why a parameter accessor or
+  // definition might need to be retained:
+  // 1. It is accessed after the constructor has finished
+  // 2. It is accessed before it is defined
+  // 3. It is accessed on an object other than `this`
+  // 4. It is a mutable parameter accessor
+  // 5. It has a wildcard initializer `_`
+  private val retainedPrivateVals = mutable.Set[Symbol]()
+  private val seenPrivateVals = mutable.Set[Symbol]()
+
+  /** Record that `tree` references a private val/accessor in a way that may
+   *  force the referenced field to be retained. References via `this` from
+   *  inside the primary constructor can use the constructor parameter
+   *  instead, so they do not force retention.
+   */
+  private def markUsedPrivateSymbols(tree: RefTree)(implicit ctx: Context): Unit = {
+
+    val sym = tree.symbol
+    def retain() =
+      retainedPrivateVals.add(sym)
+
+    if (sym.exists && sym.owner.isClass && mightBeDropped(sym)) {
+      val owner = sym.owner.asClass
+
+      tree match {
+        case Ident(_) | Select(This(_), _) =>
+          def inConstructor = {
+            val method = ctx.owner.enclosingMethod
+            method.isPrimaryConstructor && ctx.owner.enclosingClass == owner
+          }
+          if (inConstructor && (sym.is(ParamAccessor) || seenPrivateVals.contains(sym))) {
+            // used inside constructor, accessed on this,
+            // could use constructor argument instead, no need to retain field
+          }
+          else retain()
+        case _ => retain()
+      }
+    }
+  }
+
+  override def transformIdent(tree: tpd.Ident)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    markUsedPrivateSymbols(tree)
+    tree
+  }
+
+  override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    markUsedPrivateSymbols(tree)
+    tree
+  }
+
+  override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    // A wildcard-initialized val must be retained (reason 5 above);
+    // otherwise just remember that its definition has been seen.
+    if (mightBeDropped(tree.symbol))
+      (if (isWildcardStarArg(tree.rhs)) retainedPrivateVals else seenPrivateVals) += tree.symbol
+    tree
+  }
+
+  /** All initializers for non-lazy fields should be moved into constructor.
+   *  All non-abstract methods should be implemented (this is assured for constructors
+   *  in this phase and for other methods in memoize).
+   */
+  override def checkPostCondition(tree: tpd.Tree)(implicit ctx: Context): Unit = {
+    tree match {
+      case tree: ValDef if tree.symbol.exists && tree.symbol.owner.isClass && !tree.symbol.is(Lazy) && !tree.symbol.hasAnnotation(defn.ScalaStaticAnnot) =>
+        assert(tree.rhs.isEmpty, i"$tree: initializer should be moved to constructors")
+      case tree: DefDef if !tree.symbol.is(LazyOrDeferred) =>
+        assert(!tree.rhs.isEmpty, i"unimplemented: $tree")
+      case _ =>
+    }
+  }
+
+  /** @return true if after ExplicitOuter, all references from this tree go via an
+   *          outer link, so no parameter accessors need to be rewired to parameters
+   */
+  private def noDirectRefsFrom(tree: Tree)(implicit ctx: Context) =
+    tree.isDef && tree.symbol.isClass && !tree.symbol.is(InSuperCall)
+
+  /** Class members that can be eliminated if referenced only from their own
+   *  constructor.
+   */
+  private def mightBeDropped(sym: Symbol)(implicit ctx: Context) =
+    sym.is(Private, butNot = MethodOrLazy) && !sym.is(MutableParamAccessor)
+
+  private final val MutableParamAccessor = allOf(Mutable, ParamAccessor)
+
+  /** Restructure the template: move field initializers and loose statements
+   *  into the primary constructor, rewire parameter-accessor references to
+   *  constructor parameters, and drop fields that need not be retained.
+   */
+  override def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    val cls = ctx.owner.asClass
+
+    val constr @ DefDef(nme.CONSTRUCTOR, Nil, vparams :: Nil, _, EmptyTree) = tree.constr
+
+    // Produce aligned accessors and constructor parameters. We have to adjust
+    // for any outer parameters, which are last in the sequence of original
+    // parameter accessors but come first in the constructor parameter list.
+    val accessors = cls.paramAccessors.filterNot(_.isSetter)
+    val vparamsWithOuterLast = vparams match {
+      case vparam :: rest if vparam.name == nme.OUTER => rest ::: vparam :: Nil
+      case _ => vparams
+    }
+    val paramSyms = vparamsWithOuterLast map (_.symbol)
+
+    // Adjustments performed when moving code into the constructor:
+    //  (1) Replace references to param accessors by constructor parameters
+    //      except possibly references to mutable variables, if `excluded = Mutable`.
+    //      (Mutable parameters should be replaced only during the super call)
+    //  (2) If the parameter accessor reference was to an alias getter,
+    //      drop the () when replacing by the parameter.
+    object intoConstr extends TreeMap {
+      override def transform(tree: Tree)(implicit ctx: Context): Tree = tree match {
+        case Ident(_) | Select(This(_), _) =>
+          var sym = tree.symbol
+          if (sym is (ParamAccessor, butNot = Mutable)) sym = sym.subst(accessors, paramSyms)
+          if (sym.owner.isConstructor) ref(sym).withPos(tree.pos) else tree
+        case Apply(fn, Nil) =>
+          val fn1 = transform(fn)
+          if ((fn1 ne fn) && fn1.symbol.is(Param) && fn1.symbol.owner.isPrimaryConstructor)
+            fn1 // in this case, fn1.symbol was an alias for a parameter in a superclass
+          else cpy.Apply(tree)(fn1, Nil)
+        case _ =>
+          if (noDirectRefsFrom(tree)) tree else super.transform(tree)
+      }
+
+      def apply(tree: Tree, prevOwner: Symbol)(implicit ctx: Context): Tree = {
+        transform(tree).changeOwnerAfter(prevOwner, constr.symbol, thisTransform)
+      }
+    }
+
+    def isRetained(acc: Symbol) = {
+      !mightBeDropped(acc) || retainedPrivateVals(acc)
+    }
+
+    val constrStats, clsStats = new mutable.ListBuffer[Tree]
+
+    /** Map outer getters $outer and outer accessors $A$B$$$outer to the given outer parameter. */
+    def mapOuter(outerParam: Symbol) = new TreeMap {
+      override def transform(tree: Tree)(implicit ctx: Context) = tree match {
+        case Apply(fn, Nil)
+          if (fn.symbol.is(OuterAccessor)
+             || fn.symbol.isGetter && fn.symbol.name == nme.OUTER
+             ) &&
+             fn.symbol.info.resultType.classSymbol == outerParam.info.classSymbol =>
+          ref(outerParam)
+        case _ =>
+          super.transform(tree)
+      }
+    }
+
+    val dropped = mutable.Set[Symbol]()
+
+    // Split class body into statements that go into constructor and
+    // definitions that are kept as members of the class.
+    def splitStats(stats: List[Tree]): Unit = stats match {
+      case stat :: stats1 =>
+        stat match {
+          case stat @ ValDef(name, tpt, _) if !stat.symbol.is(Lazy) && !stat.symbol.hasAnnotation(defn.ScalaStaticAnnot) =>
+            val sym = stat.symbol
+            if (isRetained(sym)) {
+              if (!stat.rhs.isEmpty && !isWildcardArg(stat.rhs))
+                constrStats += Assign(ref(sym), intoConstr(stat.rhs, sym)).withPos(stat.pos)
+              clsStats += cpy.ValDef(stat)(rhs = EmptyTree)
+            }
+            else if (!stat.rhs.isEmpty) {
+              dropped += sym
+              sym.copySymDenotation(
+                initFlags = sym.flags &~ Private,
+                owner = constr.symbol).installAfter(thisTransform)
+              constrStats += intoConstr(stat, sym)
+            }
+          case DefDef(nme.CONSTRUCTOR, _, ((outerParam @ ValDef(nme.OUTER, _, _)) :: _) :: Nil, _, _) =>
+            clsStats += mapOuter(outerParam.symbol).transform(stat)
+          case _: DefTree =>
+            clsStats += stat
+          case _ =>
+            constrStats += intoConstr(stat, tree.symbol)
+        }
+        splitStats(stats1)
+      case Nil =>
+        () // was `(Nil, Nil)`: the method is Unit-typed, so that tuple was built only to be discarded
+    }
+    splitStats(tree.body)
+
+    // The initializers for the retained accessors.
+    val copyParams = accessors flatMap { acc =>
+      if (!isRetained(acc)) {
+        dropped += acc
+        Nil
+      } else {
+        val target = if (acc.is(Method)) acc.field else acc
+        if (!target.exists) Nil // this case arises when the parameter accessor is an alias
+        else {
+          val param = acc.subst(accessors, paramSyms)
+          val assigns = Assign(ref(target), ref(param)).withPos(tree.pos) :: Nil
+          if (acc.name != nme.OUTER) assigns
+          else {
+            // insert test: if ($outer eq null) throw new NullPointerException
+            val nullTest =
+              If(ref(param).select(defn.Object_eq).appliedTo(Literal(Constant(null))),
+                 Throw(New(defn.NullPointerExceptionClass.typeRef, Nil)),
+                 unitLiteral)
+            nullTest :: assigns
+          }
+        }
+      }
+    }
+
+    // Drop accessors that are not retained from class scope
+    if (dropped.nonEmpty) {
+      val clsInfo = cls.classInfo
+      cls.copy(
+        info = clsInfo.derivedClassInfo(
+          decls = clsInfo.decls.filteredScope(!dropped.contains(_))))
+      // TODO: this happens to work only because Constructors is the last phase in group
+    }
+
+    val (superCalls, followConstrStats) = constrStats.toList match {
+      case (sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest)
+      case stats => (Nil, stats)
+    }
+
+    val mappedSuperCalls = vparams match {
+      case (outerParam @ ValDef(nme.OUTER, _, _)) :: _ =>
+        superCalls.map(mapOuter(outerParam.symbol).transform)
+      case _ => superCalls
+    }
+
+    cpy.Template(tree)(
+      constr = cpy.DefDef(constr)(
+        rhs = Block(copyParams ::: mappedSuperCalls ::: followConstrStats, unitLiteral)),
+      body = clsStats.toList)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/CrossCastAnd.scala b/compiler/src/dotty/tools/dotc/transform/CrossCastAnd.scala
new file mode 100644
index 000000000..4fc4ef10b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CrossCastAnd.scala
@@ -0,0 +1,30 @@
+package dotty.tools.dotc.transform
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Flags
+import dotty.tools.dotc.core.Types.{NoType, Type, AndType}
+import dotty.tools.dotc.transform.TreeTransforms._
+import tpd._
+
+import scala.collection.mutable.ListBuffer
+
+
+/** Redirects selections of private members on `AndType` receivers so that
+ *  they go through the intersection component that declares the member.
+ *  Needed for correctness of erasure. See `tests/run/PrivateAnd.scala`.
+ */
+class CrossCastAnd extends MiniPhaseTransform { thisTransform =>
+
+  override def phaseName: String = "crossCast"
+
+  override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    val member = tree.symbol
+    lazy val wideQualType = tree.qualifier.tpe.widen
+    if (!member.is(Flags.Private) || wideQualType.typeSymbol == member.owner) tree
+    else {
+      // Cast the receiver to `owner-base-type & qualifier-type` so the
+      // selection is performed from the component declaring the member.
+      val narrowed = AndType(wideQualType.baseTypeWithArgs(member.owner), tree.qualifier.tpe)
+      cpy.Select(tree)(tree.qualifier.asInstance(narrowed), tree.name)
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
new file mode 100644
index 000000000..7b317abef
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
@@ -0,0 +1,23 @@
+package dotty.tools.dotc
+package transform
+import core.Contexts.Context
+
+/** Utility class for lazy values whose evaluation depends on a context.
+ *  Use it whenever computing the value requires a context, but the computed
+ *  value can be re-used afterwards with a different context.
+ *
+ *  A typical use case is a lazy val in a phase object that exists once per
+ *  root context, where the expression initializing the lazy val depends only
+ *  on the root context, but not on any changes afterwards.
+ */
+class CtxLazy[T](expr: Context => T) {
+  // Result cache: None until the first forcing call, Some(result) afterwards.
+  private var cached: Option[T] = None
+
+  /** Evaluate `expr` with `ctx` on the first call; return the cached result thereafter. */
+  def apply()(implicit ctx: Context): T = cached match {
+    case Some(value) => value
+    case None =>
+      val value = expr(ctx)
+      cached = Some(value)
+      value
+  }
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
new file mode 100644
index 000000000..7b37c5881
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
@@ -0,0 +1,98 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers.SymTransformer
+import Phases.Phase
+import Contexts.Context
+import Flags._
+import Symbols._
+import SymDenotations.SymDenotation
+import ast.Trees._
+import collection.mutable
+import Decorators._
+import NameOps._
+import TreeTransforms.MiniPhaseTransform
+import dotty.tools.dotc.transform.TreeTransforms.TransformerInfo
+
+/** Remove companion objects that are empty
+ *  Lots of constraints here:
+ *  1. It's impractical to place DropEmptyCompanions before lambda lift because dropped
+ *     modules can be anywhere and have hard to trace references.
+ *  2. DropEmptyCompanions cannot be interleaved with LambdaLift or Flatten because
+ *     they put things in liftedDefs sets which cause them to surface later. So
+ *     removed modules resurface.
+ *  3. DropEmptyCompanions has to be before RestoreScopes.
+ *  The solution to the constraints is to put DropEmptyCompanions between Flatten
+ *  and RestoreScopes and to only start working once we are back on PackageDef
+ *  level, so we know that all objects moved by LambdaLift and Flatten have arrived
+ *  at their destination.
+ */
+class DropEmptyCompanions extends MiniPhaseTransform { thisTransform =>
+  import ast.tpd._
+  override def phaseName = "dropEmptyCompanions"
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Flatten])
+
+  override def transformPackageDef(pdef: PackageDef)(implicit ctx: Context, info: TransformerInfo) = {
+
+    /** Is `tree` an empty companion object?
+     *  I.e. a synthetic module with an existing companion class whose body
+     *  contains nothing but its primary constructor.
+     */
+    def isEmptyCompanion(tree: Tree) = tree match {
+      case TypeDef(_, impl: Template) if tree.symbol.is(SyntheticModule) &&
+        tree.symbol.companionClass.exists &&
+        impl.body.forall(_.symbol.isPrimaryConstructor) =>
+        ctx.log(i"removing ${tree.symbol}")
+        true
+      case _ =>
+        false
+    }
+
+    // Symbols of all empty companions defined directly in this package.
+    val dropped = pdef.stats.filter(isEmptyCompanion).map(_.symbol).toSet
+
+    /** Symbol is a $lzy field representing a module */
+    def isLazyModuleVar(sym: Symbol) =
+      sym.name.isLazyLocal &&
+      sym.owner.info.decl(sym.name.asTermName.nonLazyName).symbol.is(Module)
+
+    /** Symbol should be dropped together with a dropped companion object.
+     *  Such symbols are:
+     *   - lzy fields pointing to modules,
+     *   - vals and getters representing modules.
+     */
+    def symIsDropped(sym: Symbol): Boolean =
+      (sym.is(Module) || isLazyModuleVar(sym)) &&
+      dropped.contains(sym.info.resultType.typeSymbol)
+
+    /** Tree should be dropped because it (is associated with) an empty
+     *  companion object. Such trees are
+     *   - module classes of empty companion objects
+     *   - definitions of lazy module variables or assignments to them.
+     *   - vals and getters for empty companion objects
+     */
+    def toDrop(stat: Tree): Boolean = stat match {
+      case stat: TypeDef => dropped.contains(stat.symbol)
+      case stat: ValOrDefDef => symIsDropped(stat.symbol)
+      case stat: Assign => symIsDropped(stat.lhs.symbol)
+      case _ => false
+    }
+
+    // Rebuild a class definition with dropped statements pruned from its
+    // template body and from the primary constructor's right-hand side.
+    def prune(tree: Tree): Tree = tree match {
+      case tree @ TypeDef(name, impl @ Template(constr, _, _, _)) =>
+        cpy.TypeDef(tree)(
+          rhs = cpy.Template(impl)(
+            constr = cpy.DefDef(constr)(rhs = pruneLocals(constr.rhs)),
+            body = pruneStats(impl.body)))
+      case _ =>
+        tree
+    }
+
+    // Remove statements selected by `toDrop`, then prune the survivors recursively.
+    def pruneStats(stats: List[Tree]) =
+      stats.filterConserve(!toDrop(_)).mapConserve(prune)
+
+    // NOTE(review): in the Block case the binder `expr` shadows the parameter
+    // `expr`, so `cpy.Block(expr)` copies attributes from the *inner*
+    // expression rather than the enclosing Block — confirm this is intended
+    // before re-enabling this (currently .disabled) phase.
+    def pruneLocals(expr: Tree) = expr match {
+      case Block(stats, expr) => cpy.Block(expr)(pruneStats(stats), expr)
+      case _ => expr
+    }
+
+    cpy.PackageDef(pdef)(pdef.pid, pruneStats(pdef.stats))
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/DropInlined.scala b/compiler/src/dotty/tools/dotc/transform/DropInlined.scala
new file mode 100644
index 000000000..775663b5c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/DropInlined.scala
@@ -0,0 +1,15 @@
+package dotty.tools.dotc
+package transform
+
+import typer.Inliner
+import core.Contexts.Context
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Strip `Inlined` wrapper nodes, keeping only their expansions. */
+class DropInlined extends MiniPhaseTransform {
+  import ast.tpd._
+  override def phaseName = "dropInlined"
+
+  override def transformInlined(tree: Inlined)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    // Delegate the unwrapping to the Inliner's own helper.
+    Inliner.dropInlined(tree)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
new file mode 100644
index 000000000..192227261
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
@@ -0,0 +1,129 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core._
+import DenotTransformers._
+import Symbols._
+import SymDenotations._
+import Contexts._
+import Types._
+import Flags._
+import Decorators._
+import SymUtils._
+import util.Attachment
+import core.StdNames.nme
+import ast.Trees._
+
+/** This phase eliminates ExprTypes `=> T` as types of function parameters, and replaces them by
+ *  nullary function types. More precisely:
+ *
+ *  For the types of parameter symbols:
+ *
+ *      => T       ==>    () => T
+ *
+ *  Note that `=> T` types are not eliminated in MethodTypes. This is done later at erasure.
+ *  Terms are rewritten as follows:
+ *
+ *      x          ==>    x.apply()   if x is a parameter that had type => T
+ *
+ *  Arguments to call-by-name parameters are translated as follows. First, the argument is
+ *  rewritten by the rules
+ *
+ *      e.apply()  ==>    e           if e.apply() is an argument to a call-by-name parameter
+ *      expr       ==>    () => expr  if other expr is an argument to a call-by-name parameter
+ *
+ *  This makes the argument compatible with a parameter type of () => T, which will be the
+ *  formal parameter type at erasure. But to be -Ycheckable until then, any argument
+ *  ARG rewritten by the rules above is again wrapped in an application DummyApply(ARG)
+ *  where
+ *
+ *      DummyApply: [T](() => T): T
+ *
+ *  is a synthetic method defined in Definitions. Erasure will later strip these DummyApply wrappers.
+ *
+ *  Note: This scheme to have inconsistent types between method types (whose formal types are still
+ *  ExprTypes and parameter valdefs (which are now FunctionTypes) is not pretty. There are two
+ *  other options which have been abandoned or not yet pursued.
+ *
+ *  Option 1: Transform => T to () => T also in method and function types. The problem with this is
+ *  that is that it requires to look at every type, and this forces too much, causing
+ *  Cyclic Reference errors. Abandoned for this reason.
+ *
+ *  Option 2: Merge ElimByName with erasure, or have it run immediately before. This has not been
+ *  tried yet.
+ */
+class ElimByName extends MiniPhaseTransform with InfoTransformer { thisTransformer =>
+  import ast.tpd._
+
+  override def phaseName: String = "elimByName"
+
+  override def runsAfterGroupsOf = Set(classOf[Splitter])
+    // assumes idents and selects have symbols; interferes with splitter distribution
+    // that's why it's "after group".
+
+  /** The info of the tree's symbol at phase Nullarify (i.e. before transformation) */
+  private def originalDenotation(tree: Tree)(implicit ctx: Context) =
+    tree.symbol.denot(ctx.withPhase(thisTransformer))
+
+  /** Rewrite arguments passed to call-by-name formals, wrapping each such
+   *  argument in a nullary closure plus a `DummyApply` marker (see class comment).
+   */
+  override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree =
+    ctx.traceIndented(s"transforming ${tree.show} at phase ${ctx.phase}", show = true) {
+
+      def transformArg(arg: Tree, formal: Type): Tree = formal.dealias match {
+        case formalExpr: ExprType =>
+          val argType = arg.tpe.widen
+          val argFun = arg match {
+            case Apply(Select(qual, nme.apply), Nil)
+            if qual.tpe.derivesFrom(defn.FunctionClass(0)) && isPureExpr(qual) =>
+              // e.apply() ==> e, when `e` is already a pure nullary function value
+              qual
+            case _ =>
+              // expr ==> () => expr: delay evaluation inside a fresh anonymous function
+              val inSuper = if (ctx.mode.is(Mode.InSuperCall)) InSuperCall else EmptyFlags
+              val meth = ctx.newSymbol(
+                ctx.owner, nme.ANON_FUN, Synthetic | Method | inSuper, MethodType(Nil, Nil, argType))
+              Closure(meth, _ => arg.changeOwner(ctx.owner, meth))
+          }
+          // Wrap in DummyApply so the tree stays type-correct until erasure strips it.
+          ref(defn.dummyApply).appliedToType(argType).appliedTo(argFun)
+        case _ =>
+          arg
+      }
+
+      val MethodType(_, formals) = tree.fun.tpe.widen
+      val args1 = tree.args.zipWithConserve(formals)(transformArg)
+      cpy.Apply(tree)(tree.fun, args1)
+    }
+
+  /** If denotation had an ExprType before, it now gets a function type */
+  private def exprBecomesFunction(symd: SymDenotation)(implicit ctx: Context) =
+    (symd is Param) || (symd is (ParamAccessor, butNot = Method))
+
+  /** Map `tree` to `tree.apply()` if `ftree` was of ExprType and becomes now a function */
+  private def applyIfFunction(tree: Tree, ftree: Tree)(implicit ctx: Context) = {
+    val origDenot = originalDenotation(ftree)
+    if (exprBecomesFunction(origDenot) && (origDenot.info.isInstanceOf[ExprType]))
+      tree.select(defn.Function0_apply).appliedToNone
+    else tree
+  }
+
+  override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo): Tree =
+    applyIfFunction(tree, tree)
+
+  override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree = tree match {
+    case TypeApply(Select(_, nme.asInstanceOf_), arg :: Nil) =>
+      // tree might be of form e.asInstanceOf[x.type] where x becomes a function.
+      // See pos/t296.scala
+      applyIfFunction(tree, arg)
+    case _ => tree
+  }
+
+  override def transformValDef(tree: ValDef)(implicit ctx: Context, info: TransformerInfo): Tree =
+    if (exprBecomesFunction(tree.symbol))
+      cpy.ValDef(tree)(tpt = tree.tpt.withType(tree.symbol.info))
+    else tree
+
+  /** Parameter symbols that had type `=> T` now get type `() => T`. */
+  def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = tp match {
+    case ExprType(rt) if exprBecomesFunction(sym) => defn.FunctionOf(Nil, rt)
+    case _ => tp
+  }
+
+  // Only term symbols can be affected (exprBecomesFunction checks Param/ParamAccessor).
+  override def mayChange(sym: Symbol)(implicit ctx: Context): Boolean = sym.isTerm
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
new file mode 100644
index 000000000..24c8cdc8d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
@@ -0,0 +1,84 @@
+package dotty.tools.dotc
+package transform
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import TreeTransforms._, Phases.Phase
+import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
+import TypeErasure.ErasedValueType, ValueClasses._
+
+/** This phase erases ErasedValueType to their underlying type.
+ *  It also removes the synthetic cast methods u2evt$ and evt2u$ which are
+ *  no longer needed afterwards.
+ */
+class ElimErasedValueType extends MiniPhaseTransform with InfoTransformer {
+
+  import tpd._
+
+  override def phaseName: String = "elimErasedValueType"
+
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure])
+
+  /** For the module class of a derived value class's companion, drop the
+   *  synthetic u2evt$/evt2u$ casts from its declarations; for every other
+   *  symbol, erase ErasedValueType in its info.
+   */
+  def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = sym match {
+    case sym: ClassSymbol if sym is ModuleClass =>
+      sym.companionClass match {
+        case origClass: ClassSymbol if isDerivedValueClass(origClass) =>
+          val cinfo = tp.asInstanceOf[ClassInfo]
+          val decls1 = cinfo.decls.cloneScope
+          ctx.atPhase(this.next) { implicit ctx =>
+            // Remove synthetic cast methods introduced by ExtensionMethods,
+            // they are no longer needed after this phase.
+            decls1.unlink(cinfo.decl(nme.U2EVT).symbol)
+            decls1.unlink(cinfo.decl(nme.EVT2U).symbol)
+          }
+          cinfo.derivedClassInfo(decls = decls1)
+        case _ =>
+          tp
+      }
+    case _ =>
+      elimEVT(tp)
+  }
+
+  /** Replace ErasedValueType by its underlying type, recursing into method
+   *  parameter and result types.
+   */
+  def elimEVT(tp: Type)(implicit ctx: Context): Type = tp match {
+    case ErasedValueType(_, underlying) =>
+      elimEVT(underlying)
+    case tp: MethodType =>
+      val paramTypes = tp.paramTypes.mapConserve(elimEVT)
+      val retType = elimEVT(tp.resultType)
+      tp.derivedMethodType(tp.paramNames, paramTypes, retType)
+    case _ =>
+      tp
+  }
+
+  /** Re-type `tree` with ErasedValueType erased from its type. */
+  def transformTypeOfTree(tree: Tree)(implicit ctx: Context): Tree =
+    tree.withType(elimEVT(tree.tpe))
+
+  override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    val Apply(fun, args) = tree
+
+    // The casts to and from ErasedValueType are no longer needed once ErasedValueType
+    // has been eliminated.
+    val t =
+      if (fun.symbol.isValueClassConvertMethod)
+        args.head
+      else
+        tree
+    transformTypeOfTree(t)
+  }
+
+  override def transformInlined(tree: Inlined)(implicit ctx: Context, info: TransformerInfo): Tree =
+    transformTypeOfTree(tree)
+
+  // FIXME: transformIf and transformBlock won't be required anymore once #444 is fixed.
+  override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo): Tree =
+    transformTypeOfTree(tree)
+  override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree =
+    transformTypeOfTree(tree)
+  override def transformBlock(tree: Block)(implicit ctx: Context, info: TransformerInfo): Tree =
+    transformTypeOfTree(tree)
+  override def transformIf(tree: If)(implicit ctx: Context, info: TransformerInfo): Tree =
+    transformTypeOfTree(tree)
+  override def transformTypeTree(tree: TypeTree)(implicit ctx: Context, info: TransformerInfo): Tree =
+    transformTypeOfTree(tree)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
new file mode 100644
index 000000000..258b7f234
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
@@ -0,0 +1,135 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Names._
+import StdNames.nme
+import Types._
+import dotty.tools.dotc.transform.TreeTransforms.{AnnotationTransformer, TransformerInfo, MiniPhaseTransform, TreeTransformer}
+import ast.Trees._
+import Flags._
+import Contexts.Context
+import Symbols._
+import Constants._
+import Denotations._, SymDenotations._
+import Decorators.StringInterpolators
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
+import scala.collection.mutable
+import DenotTransformers._
+import Names.Name
+import NameOps._
+import TypeUtils._
+
+/** A transformer that removes repeated parameters (T*) from all types, replacing
+ * them with Seq types.
+ */
+class ElimRepeated extends MiniPhaseTransform with InfoTransformer with AnnotationTransformer { thisTransformer =>
+ import ast.tpd._
+
+ override def phaseName = "elimRepeated"
+
+ def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type =
+ elimRepeated(tp)
+
+ override def mayChange(sym: Symbol)(implicit ctx: Context): Boolean = sym is Method
+
+ private def elimRepeated(tp: Type)(implicit ctx: Context): Type = tp.stripTypeVar match {
+ case tp @ MethodType(paramNames, paramTypes) =>
+ val resultType1 = elimRepeated(tp.resultType)
+ val paramTypes1 =
+ if (paramTypes.nonEmpty && paramTypes.last.isRepeatedParam) {
+ val last = paramTypes.last.underlyingIfRepeated(tp.isJava)
+ paramTypes.init :+ last
+ } else paramTypes
+ tp.derivedMethodType(paramNames, paramTypes1, resultType1)
+ case tp: PolyType =>
+ tp.derivedPolyType(tp.paramNames, tp.paramBounds, elimRepeated(tp.resultType))
+ case tp =>
+ tp
+ }
+
+ def transformTypeOfTree(tree: Tree)(implicit ctx: Context): Tree =
+ tree.withType(elimRepeated(tree.tpe))
+
+ override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo): Tree =
+ transformTypeOfTree(tree)
+
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree =
+ transformTypeOfTree(tree)
+
+ override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val args1 = tree.args.map {
+ case arg: Typed if isWildcardStarArg(arg) =>
+ if (tree.fun.symbol.is(JavaDefined) && arg.expr.tpe.derivesFrom(defn.SeqClass))
+ seqToArray(arg.expr)
+ else arg.expr
+ case arg => arg
+ }
+ transformTypeOfTree(cpy.Apply(tree)(tree.fun, args1))
+ }
+
+ /** Convert sequence argument to Java array */
+ private def seqToArray(tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case SeqLiteral(elems, elemtpt) =>
+ JavaSeqLiteral(elems, elemtpt)
+ case _ =>
+ val elemType = tree.tpe.elemType
+ var elemClass = elemType.classSymbol
+ if (defn.PhantomClasses contains elemClass) elemClass = defn.ObjectClass
+ ref(defn.DottyArraysModule)
+ .select(nme.seqToArray)
+ .appliedToType(elemType)
+ .appliedTo(tree, Literal(Constant(elemClass.typeRef)))
+ .ensureConforms(defn.ArrayOf(elemType))
+ // Because of phantom classes, the Java array's type might not conform to the return type
+ }
+
+ override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ transformTypeOfTree(tree)
+
+ /** If method overrides a Java varargs method, add a varargs bridge.
+ * Also transform trees inside method annotation
+ */
+ override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ assert(ctx.phase == thisTransformer)
+ def overridesJava = tree.symbol.allOverriddenSymbols.exists(_ is JavaDefined)
+ if (tree.symbol.info.isVarArgsMethod && overridesJava)
+ addVarArgsBridge(tree)(ctx.withPhase(thisTransformer.next))
+ else
+ tree
+ }
+
+ /** Add a Java varargs bridge
+ * @param ddef the original method definition which is assumed to override
+ * a Java varargs method JM up to this phase.
+ * @return a thicket consisting of `ddef` and a varargs bridge method
+ * which overrides the Java varargs method JM from this phase on
+ * and forwards to `ddef`.
+ */
+ private def addVarArgsBridge(ddef: DefDef)(implicit ctx: Context): Tree = {
+ val original = ddef.symbol.asTerm
+ val bridge = original.copy(
+ flags = ddef.symbol.flags &~ Private | Artifact,
+ info = toJavaVarArgs(ddef.symbol.info)).enteredAfter(thisTransformer).asTerm
+ val bridgeDef = polyDefDef(bridge, trefs => vrefss => {
+ val (vrefs :+ varArgRef) :: vrefss1 = vrefss
+ val elemtp = varArgRef.tpe.widen.argTypes.head
+ ref(original.termRef)
+ .appliedToTypes(trefs)
+ .appliedToArgs(vrefs :+ TreeGen.wrapArray(varArgRef, elemtp))
+ .appliedToArgss(vrefss1)
+ })
+ Thicket(ddef, bridgeDef)
+ }
+
+ /** Convert type from Scala to Java varargs method */
+ private def toJavaVarArgs(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: PolyType =>
+ tp.derivedPolyType(tp.paramNames, tp.paramBounds, toJavaVarArgs(tp.resultType))
+ case tp: MethodType =>
+ val inits :+ last = tp.paramTypes
+ val last1 = last.underlyingIfRepeated(isJava = true)
+ tp.derivedMethodType(tp.paramNames, inits :+ last1, tp.resultType)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
new file mode 100644
index 000000000..0601e0122
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
@@ -0,0 +1,40 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Contexts.Context
+import Flags._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.SymDenotations.SymDenotation
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+import dotty.tools.dotc.core.Types.{ThisType, TermRef}
+
+/** Replace This references to module classes in static methods by global identifiers to the
+ * corresponding modules.
+ */
+class ElimStaticThis extends MiniPhaseTransform {
+ import ast.tpd._
+ def phaseName: String = "elimStaticThis"
+
+ override def transformThis(tree: This)(implicit ctx: Context, info: TransformerInfo): Tree =
+ if (!tree.symbol.is(Package) && ctx.owner.enclosingMethod.is(JavaStatic)) {
+ assert(tree.symbol.is(ModuleClass))
+ ref(tree.symbol.sourceModule)
+ }
+ else tree
+
+ override def transformIdent(tree: tpd.Ident)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ if (ctx.owner.enclosingMethod.is(JavaStatic)) {
+ tree.tpe match {
+ case TermRef(thiz: ThisType, _) if thiz.cls.is(ModuleClass) =>
+ ref(thiz.cls.sourceModule).select(tree.symbol)
+ case TermRef(thiz: ThisType, _) =>
+ assert(tree.symbol.is(Flags.JavaStatic))
+ tree
+ case _ => tree
+ }
+ }
+ else tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
new file mode 100644
index 000000000..069176111
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
@@ -0,0 +1,664 @@
+package dotty.tools.dotc
+package transform
+
+import core.Phases._
+import core.DenotTransformers._
+import core.Denotations._
+import core.SymDenotations._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Names._
+import core.StdNames._
+import core.NameOps._
+import core.Decorators._
+import core.Constants._
+import typer.NoChecking
+import typer.ProtoTypes._
+import typer.ErrorReporting._
+import core.TypeErasure._
+import core.Decorators._
+import dotty.tools.dotc.ast.{Trees, tpd, untpd}
+import ast.Trees._
+import scala.collection.mutable.ListBuffer
+import dotty.tools.dotc.core.{Constants, Flags}
+import ValueClasses._
+import TypeUtils._
+import ExplicitOuter._
+import core.Mode
+
+class Erasure extends Phase with DenotTransformer { thisTransformer =>
+
+ override def phaseName: String = "erasure"
+
+ /** List of names of phases that should precede this phase */
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[InterceptedMethods], classOf[Splitter], classOf[ElimRepeated])
+
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref match {
+ case ref: SymDenotation =>
+ assert(ctx.phase == this, s"transforming $ref at ${ctx.phase}")
+ if (ref.symbol eq defn.ObjectClass) {
+ // After erasure, all former Any members are now Object members
+ val ClassInfo(pre, _, ps, decls, selfInfo) = ref.info
+ val extendedScope = decls.cloneScope
+ for (decl <- defn.AnyClass.classInfo.decls)
+ if (!decl.isConstructor) extendedScope.enter(decl)
+ ref.copySymDenotation(
+ info = transformInfo(ref.symbol,
+ ClassInfo(pre, defn.ObjectClass, ps, extendedScope, selfInfo))
+ )
+ }
+ else {
+ val oldSymbol = ref.symbol
+ val newSymbol =
+ if ((oldSymbol.owner eq defn.AnyClass) && oldSymbol.isConstructor)
+ defn.ObjectClass.primaryConstructor
+ else oldSymbol
+ val oldOwner = ref.owner
+ val newOwner = if (oldOwner eq defn.AnyClass) defn.ObjectClass else oldOwner
+ val oldInfo = ref.info
+ val newInfo = transformInfo(ref.symbol, oldInfo)
+ val oldFlags = ref.flags
+ val newFlags = ref.flags &~ Flags.HasDefaultParams // HasDefaultParams needs to be dropped because overriding might become overloading
+ // TODO: define derivedSymDenotation?
+ if ((oldSymbol eq newSymbol) && (oldOwner eq newOwner) && (oldInfo eq newInfo) && (oldFlags == newFlags)) ref
+ else {
+ assert(!ref.is(Flags.PackageClass), s"trans $ref @ ${ctx.phase} oldOwner = $oldOwner, newOwner = $newOwner, oldInfo = $oldInfo, newInfo = $newInfo ${oldOwner eq newOwner} ${oldInfo eq newInfo}")
+ ref.copySymDenotation(symbol = newSymbol, owner = newOwner, initFlags = newFlags, info = newInfo)
+ }
+ }
+ case ref =>
+ ref.derivedSingleDenotation(ref.symbol, transformInfo(ref.symbol, ref.info))
+ }
+
+ val eraser = new Erasure.Typer
+
+ def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ unit.tpdTree = eraser.typedExpr(unit.tpdTree)(ctx.fresh.setPhase(this.next))
+ }
+
+ override def checkPostCondition(tree: tpd.Tree)(implicit ctx: Context) = {
+ assertErased(tree)
+ tree match {
+ case res: tpd.This =>
+ assert(!ExplicitOuter.referencesOuter(ctx.owner.enclosingClass, res),
+ i"Reference to $res from ${ctx.owner.showLocated}")
+ case ret: tpd.Return =>
+ // checked only after erasure, as checking before erasure is complicated
+ // due to presence of type params in returned types
+ val from = if (ret.from.isEmpty) ctx.owner.enclosingMethod else ret.from.symbol
+ val rType = from.info.finalResultType
+ assert(ret.expr.tpe <:< rType,
+ i"Returned value:${ret.expr} does not conform to result type(${ret.expr.tpe.widen} of method $from")
+ case _ =>
+ }
+ }
+
+ /** Assert that tree type and its widened underlying type are erased.
+ * Also assert that term refs have fixed symbols (so we are sure
+ * they need not be reloaded using member; this would likely fail as signatures
+ * may change after erasure).
+ */
+ def assertErased(tree: tpd.Tree)(implicit ctx: Context): Unit = {
+ assertErased(tree.typeOpt, tree)
+ if (!defn.isPolymorphicAfterErasure(tree.symbol))
+ assertErased(tree.typeOpt.widen, tree)
+ if (ctx.mode.isExpr)
+ tree.tpe match {
+ case ref: TermRef =>
+ assert(ref.denot.isInstanceOf[SymDenotation] ||
+ ref.denot.isInstanceOf[UniqueRefDenotation],
+ i"non-sym type $ref of class ${ref.getClass} with denot of class ${ref.denot.getClass} of $tree")
+ case _ =>
+ }
+ }
+
+ def assertErased(tp: Type, tree: tpd.Tree = tpd.EmptyTree)(implicit ctx: Context): Unit =
+ if (tp.typeSymbol == defn.ArrayClass &&
+ ctx.compilationUnit.source.file.name == "Array.scala") {} // ok
+ else
+ assert(isErasedType(tp),
+ i"The type $tp - ${tp.toString} of class ${tp.getClass} of tree $tree : ${tree.tpe} / ${tree.getClass} is illegal after erasure, phase = ${ctx.phase.prev}")
+}
+
+object Erasure extends TypeTestsCasts{
+
+ import tpd._
+
+ object Boxing {
+
+ def isUnbox(sym: Symbol)(implicit ctx: Context) =
+ sym.name == nme.unbox && sym.owner.linkedClass.isPrimitiveValueClass
+
+ def isBox(sym: Symbol)(implicit ctx: Context) =
+ sym.name == nme.box && sym.owner.linkedClass.isPrimitiveValueClass
+
+ def boxMethod(cls: ClassSymbol)(implicit ctx: Context) =
+ cls.linkedClass.info.member(nme.box).symbol
+ def unboxMethod(cls: ClassSymbol)(implicit ctx: Context) =
+ cls.linkedClass.info.member(nme.unbox).symbol
+
+ /** If this tree is an unbox operation which can be safely removed
+ * when enclosed in a box, the unboxed argument, otherwise EmptyTree.
+ * Note that one can't always remove a Box(Unbox(x)) combination because the
+ * process of unboxing x may lead to throwing an exception.
+ * This is important for specialization: calls to the super constructor should not box/unbox specialized
+ * fields (see TupleX). (ID)
+ */
+ private def safelyRemovableUnboxArg(tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case Apply(fn, arg :: Nil)
+ if isUnbox(fn.symbol) && defn.ScalaBoxedClasses().contains(arg.tpe.widen.typeSymbol) =>
+ arg
+ case _ =>
+ EmptyTree
+ }
+
+ def constant(tree: Tree, const: Tree)(implicit ctx: Context) =
+ if (isPureExpr(tree)) const else Block(tree :: Nil, const)
+
+ final def box(tree: Tree, target: => String = "")(implicit ctx: Context): Tree = ctx.traceIndented(i"boxing ${tree.showSummary}: ${tree.tpe} into $target") {
+ tree.tpe.widen match {
+ case ErasedValueType(tycon, _) =>
+ New(tycon, cast(tree, underlyingOfValueClass(tycon.symbol.asClass)) :: Nil) // todo: use adaptToType?
+ case tp =>
+ val cls = tp.classSymbol
+ if (cls eq defn.UnitClass) constant(tree, ref(defn.BoxedUnit_UNIT))
+ else if (cls eq defn.NothingClass) tree // a non-terminating expression doesn't need boxing
+ else {
+ assert(cls ne defn.ArrayClass)
+ val arg = safelyRemovableUnboxArg(tree)
+ if (arg.isEmpty) ref(boxMethod(cls.asClass)).appliedTo(tree)
+ else {
+ ctx.log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
+ arg
+ }
+ }
+ }
+ }
+
+ def unbox(tree: Tree, pt: Type)(implicit ctx: Context): Tree = ctx.traceIndented(i"unboxing ${tree.showSummary}: ${tree.tpe} as a $pt") {
+ pt match {
+ case ErasedValueType(tycon, underlying) =>
+ def unboxedTree(t: Tree) =
+ adaptToType(t, tycon)
+ .select(valueClassUnbox(tycon.symbol.asClass))
+ .appliedToNone
+
+ // Null unboxing needs to be treated separately since we cannot call a method on null.
+ // "Unboxing" null to underlying is equivalent to doing null.asInstanceOf[underlying]
+ // See tests/pos/valueclasses/nullAsInstanceOfVC.scala for cases where this might happen.
+ val tree1 =
+ if (tree.tpe isRef defn.NullClass)
+ adaptToType(tree, underlying)
+ else if (!(tree.tpe <:< tycon)) {
+ assert(!(tree.tpe.typeSymbol.isPrimitiveValueClass))
+ val nullTree = Literal(Constant(null))
+ val unboxedNull = adaptToType(nullTree, underlying)
+
+ evalOnce(tree) { t =>
+ If(t.select(defn.Object_eq).appliedTo(nullTree),
+ unboxedNull,
+ unboxedTree(t))
+ }
+ } else unboxedTree(tree)
+
+ cast(tree1, pt)
+ case _ =>
+ val cls = pt.widen.classSymbol
+ if (cls eq defn.UnitClass) constant(tree, Literal(Constant(())))
+ else {
+ assert(cls ne defn.ArrayClass)
+ ref(unboxMethod(cls.asClass)).appliedTo(tree)
+ }
+ }
+ }
+
+ /** Generate a synthetic cast operation from tree.tpe to pt.
+ * Does not do any boxing/unboxing (this is handled upstream).
+ * Casts from and to ErasedValueType are special, see the explanation
+ * in ExtensionMethods#transform.
+ */
+ def cast(tree: Tree, pt: Type)(implicit ctx: Context): Tree = {
+ // TODO: The commented out assertion fails for tailcall/t6574.scala
+ // Fix the problem and enable the assertion.
+ // assert(!pt.isInstanceOf[SingletonType], pt)
+ if (pt isRef defn.UnitClass) unbox(tree, pt)
+ else (tree.tpe, pt) match {
+ case (JavaArrayType(treeElem), JavaArrayType(ptElem))
+ if treeElem.widen.isPrimitiveValueType && !ptElem.isPrimitiveValueType =>
+ // See SI-2386 for one example of when this might be necessary.
+ cast(ref(defn.runtimeMethodRef(nme.toObjectArray)).appliedTo(tree), pt)
+ case (_, ErasedValueType(tycon, _)) =>
+ ref(u2evt(tycon.symbol.asClass)).appliedTo(tree)
+ case _ =>
+ tree.tpe.widen match {
+ case ErasedValueType(tycon, _) =>
+ ref(evt2u(tycon.symbol.asClass)).appliedTo(tree)
+ case _ =>
+ if (pt.isPrimitiveValueType)
+ primitiveConversion(tree, pt.classSymbol)
+ else
+ tree.asInstance(pt)
+ }
+ }
+ }
+
+ /** Adaptation of an expression `e` to an expected type `PT`, applying the following
+ * rewritings exhaustively as long as the type of `e` is not a subtype of `PT`.
+ *
+ * e -> e() if `e` appears not as the function part of an application
+ * e -> box(e) if `e` is of erased value type
+ * e -> unbox(e, PT) otherwise, if `PT` is an erased value type
+ * e -> box(e) if `e` is of primitive type and `PT` is not a primitive type
+ * e -> unbox(e, PT) if `PT` is a primitive type and `e` is not of primitive type
+ * e -> cast(e, PT) otherwise
+ */
+ def adaptToType(tree: Tree, pt: Type)(implicit ctx: Context): Tree =
+ if (pt.isInstanceOf[FunProto]) tree
+ else tree.tpe.widen match {
+ case MethodType(Nil, _) if tree.isTerm =>
+ adaptToType(tree.appliedToNone, pt)
+ case tpw =>
+ if (pt.isInstanceOf[ProtoType] || tree.tpe <:< pt)
+ tree
+ else if (tpw.isErasedValueType)
+ adaptToType(box(tree), pt)
+ else if (pt.isErasedValueType)
+ adaptToType(unbox(tree, pt), pt)
+ else if (tpw.isPrimitiveValueType && !pt.isPrimitiveValueType)
+ adaptToType(box(tree), pt)
+ else if (pt.isPrimitiveValueType && !tpw.isPrimitiveValueType)
+ adaptToType(unbox(tree, pt), pt)
+ else
+ cast(tree, pt)
+ }
+ }
+
+ class Typer extends typer.ReTyper with NoChecking {
+ import Boxing._
+
+ def erasedType(tree: untpd.Tree)(implicit ctx: Context): Type = {
+ val tp = tree.typeOpt
+ if (tree.isTerm) erasedRef(tp) else valueErasure(tp)
+ }
+
+ override def promote(tree: untpd.Tree)(implicit ctx: Context): tree.ThisTree[Type] = {
+ assert(tree.hasType)
+ val erased = erasedType(tree)
+ ctx.log(s"promoting ${tree.show}: ${erased.showWithUnderlying()}")
+ tree.withType(erased)
+ }
+
+ /** When erasing most TypeTrees we should not semi-erase value types.
+ * This is not the case for [[DefDef#tpt]], [[ValDef#tpt]] and [[Typed#tpt]], they
+ * are handled separately by [[typedDefDef]], [[typedValDef]] and [[typedTyped]].
+ */
+ override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): TypeTree =
+ tree.withType(erasure(tree.tpe))
+
+ /** This override is only needed to semi-erase type ascriptions */
+ override def typedTyped(tree: untpd.Typed, pt: Type)(implicit ctx: Context): Tree = {
+ val Typed(expr, tpt) = tree
+ val tpt1 = promote(tpt)
+ val expr1 = typed(expr, tpt1.tpe)
+ assignType(untpd.cpy.Typed(tree)(expr1, tpt1), tpt1)
+ }
+
+ override def typedLiteral(tree: untpd.Literal)(implicit ctx: Context): Literal =
+ if (tree.typeOpt.isRef(defn.UnitClass)) tree.withType(tree.typeOpt)
+ else if (tree.const.tag == Constants.ClazzTag) Literal(Constant(erasure(tree.const.typeValue)))
+ else super.typedLiteral(tree)
+
+ /** Type check select nodes, applying the following rewritings exhaustively
+ * on selections `e.m`, where `OT` is the type of the owner of `m` and `ET`
+ * is the erased type of the selection's original qualifier expression.
+ *
+ * e.m1 -> e.m2 if `m1` is a member of Any or AnyVal and `m2` is
+ * the same-named member in Object.
+ * e.m -> box(e).m if `e` is primitive and `m` is a member of a reference class
+ * or `e` has an erased value class type.
+ * e.m -> unbox(e).m if `e` is not primitive and `m` is a member of a primitive type.
+ * e.m -> cast(e, OT).m if the type of `e` does not conform to OT and `m`
+ * is not an array operation.
+ *
+ * If `m` is an array operation, i.e. one of the members apply, update, length, clone, and
+ * <init> of class Array, we additionally try the following rewritings:
+ *
+ * e.m -> runtime.array_m(e) if ET is Object
+ * e.m -> cast(e, ET).m if the type of `e` does not conform to ET
+ * e.clone -> e.clone' where clone' is Object's clone method
+ * e.m -> e.[]m if `m` is an array operation other than `clone`.
+ */
+ override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
+ val sym = tree.symbol
+ assert(sym.exists, tree.show)
+
+ def select(qual: Tree, sym: Symbol): Tree = {
+ val name = tree.typeOpt match {
+ case tp: NamedType if tp.name.isShadowedName => sym.name.shadowedName
+ case _ => sym.name
+ }
+ untpd.cpy.Select(tree)(qual, sym.name)
+ .withType(NamedType.withFixedSym(qual.tpe, sym))
+ }
+
+ def selectArrayMember(qual: Tree, erasedPre: Type): Tree =
+ if (erasedPre isRef defn.ObjectClass)
+ runtimeCallWithProtoArgs(tree.name.genericArrayOp, pt, qual)
+ else if (!(qual.tpe <:< erasedPre))
+ selectArrayMember(cast(qual, erasedPre), erasedPre)
+ else
+ assignType(untpd.cpy.Select(tree)(qual, tree.name.primitiveArrayOp), qual)
+
+ def adaptIfSuper(qual: Tree): Tree = qual match {
+ case Super(thisQual, untpd.EmptyTypeIdent) =>
+ val SuperType(thisType, supType) = qual.tpe
+ if (sym.owner is Flags.Trait)
+ cpy.Super(qual)(thisQual, untpd.Ident(sym.owner.asClass.name))
+ .withType(SuperType(thisType, sym.owner.typeRef))
+ else
+ qual.withType(SuperType(thisType, thisType.firstParent))
+ case _ =>
+ qual
+ }
+
+ def recur(qual: Tree): Tree = {
+ val qualIsPrimitive = qual.tpe.widen.isPrimitiveValueType
+ val symIsPrimitive = sym.owner.isPrimitiveValueClass
+ if ((sym.owner eq defn.AnyClass) || (sym.owner eq defn.AnyValClass)) {
+ assert(sym.isConstructor, s"${sym.showLocated}")
+ select(qual, defn.ObjectClass.info.decl(sym.name).symbol)
+ }
+ else if (qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType)
+ recur(box(qual))
+ else if (!qualIsPrimitive && symIsPrimitive)
+ recur(unbox(qual, sym.owner.typeRef))
+ else if (sym.owner eq defn.ArrayClass)
+ selectArrayMember(qual, erasure(tree.qualifier.typeOpt.widen.finalResultType))
+ else {
+ val qual1 = adaptIfSuper(qual)
+ if (qual1.tpe.derivesFrom(sym.owner) || qual1.isInstanceOf[Super])
+ select(qual1, sym)
+ else
+ recur(cast(qual1, sym.owner.typeRef))
+ }
+ }
+
+ recur(typed(tree.qualifier, AnySelectionProto))
+ }
+
+ override def typedThis(tree: untpd.This)(implicit ctx: Context): Tree =
+ if (tree.symbol == ctx.owner.enclosingClass || tree.symbol.isStaticOwner) promote(tree)
+ else {
+ ctx.log(i"computing outer path from ${ctx.owner.ownersIterator.toList}%, % to ${tree.symbol}, encl class = ${ctx.owner.enclosingClass}")
+ outer.path(tree.symbol)
+ }
+
+ private def runtimeCallWithProtoArgs(name: Name, pt: Type, args: Tree*)(implicit ctx: Context): Tree = {
+ val meth = defn.runtimeMethodRef(name)
+ val followingParams = meth.symbol.info.firstParamTypes.drop(args.length)
+ val followingArgs = protoArgs(pt).zipWithConserve(followingParams)(typedExpr).asInstanceOf[List[tpd.Tree]]
+ ref(meth).appliedToArgs(args.toList ++ followingArgs)
+ }
+
+ private def protoArgs(pt: Type): List[untpd.Tree] = pt match {
+ case pt: FunProto => pt.args ++ protoArgs(pt.resType)
+ case _ => Nil
+ }
+
+ override def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context) = {
+ val ntree = interceptTypeApply(tree.asInstanceOf[TypeApply])(ctx.withPhase(ctx.erasurePhase))
+
+ ntree match {
+ case TypeApply(fun, args) =>
+ val fun1 = typedExpr(fun, WildcardType)
+ fun1.tpe.widen match {
+ case funTpe: PolyType =>
+ val args1 = args.mapconserve(typedType(_))
+ untpd.cpy.TypeApply(tree)(fun1, args1).withType(funTpe.instantiate(args1.tpes))
+ case _ => fun1
+ }
+ case _ => typedExpr(ntree, pt)
+ }
+ }
+
+ override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
+ val Apply(fun, args) = tree
+ if (fun.symbol == defn.dummyApply)
+ typedUnadapted(args.head, pt)
+ else typedExpr(fun, FunProto(args, pt, this)) match {
+ case fun1: Apply => // arguments passed in prototype were already passed
+ fun1
+ case fun1 =>
+ fun1.tpe.widen match {
+ case mt: MethodType =>
+ val outers = outer.args(fun.asInstanceOf[tpd.Tree]) // can't use fun1 here because its type is already erased
+ val args1 = (outers ::: args ++ protoArgs(pt)).zipWithConserve(mt.paramTypes)(typedExpr)
+ untpd.cpy.Apply(tree)(fun1, args1) withType mt.resultType
+ case _ =>
+ throw new MatchError(i"tree $tree has unexpected type of function ${fun1.tpe.widen}, was ${fun.typeOpt.widen}")
+ }
+ }
+ }
+
+ // The following four methods take as the proto-type the erasure of the pre-existing type,
+ // if the original proto-type is not a value type.
+ // This makes all branches be adapted to the correct type.
+ override def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(implicit ctx: Context) =
+ super.typedSeqLiteral(tree, erasure(tree.typeOpt))
+ // proto type of typed seq literal is original type;
+
+ override def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) =
+ super.typedIf(tree, adaptProto(tree, pt))
+
+ override def typedMatch(tree: untpd.Match, pt: Type)(implicit ctx: Context) =
+ super.typedMatch(tree, adaptProto(tree, pt))
+
+ override def typedTry(tree: untpd.Try, pt: Type)(implicit ctx: Context) =
+ super.typedTry(tree, adaptProto(tree, pt))
+
+ private def adaptProto(tree: untpd.Tree, pt: Type)(implicit ctx: Context) = {
+ if (pt.isValueType) pt else {
+ if (tree.typeOpt.derivesFrom(ctx.definitions.UnitClass))
+ tree.typeOpt
+ else valueErasure(tree.typeOpt)
+ }
+ }
+
+ override def typedValDef(vdef: untpd.ValDef, sym: Symbol)(implicit ctx: Context): ValDef =
+ super.typedValDef(untpd.cpy.ValDef(vdef)(
+ tpt = untpd.TypedSplice(TypeTree(sym.info).withPos(vdef.tpt.pos))), sym)
+
+ override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = {
+ val restpe =
+ if (sym.isConstructor) defn.UnitType
+ else sym.info.resultType
+ val ddef1 = untpd.cpy.DefDef(ddef)(
+ tparams = Nil,
+ vparamss = (outer.paramDefs(sym) ::: ddef.vparamss.flatten) :: Nil,
+ tpt = untpd.TypedSplice(TypeTree(restpe).withPos(ddef.tpt.pos)),
+ rhs = ddef.rhs match {
+ case id @ Ident(nme.WILDCARD) => untpd.TypedSplice(id.withType(restpe))
+ case _ => ddef.rhs
+ })
+ super.typedDefDef(ddef1, sym)
+ }
+
+ /** After erasure, we may have to replace the closure method by a bridge.
+ * LambdaMetaFactory handles this automatically for most types, but we have
+ * to deal with boxing and unboxing of value classes ourselves.
+ */
+ override def typedClosure(tree: untpd.Closure, pt: Type)(implicit ctx: Context) = {
+ val implClosure @ Closure(_, meth, _) = super.typedClosure(tree, pt)
+ implClosure.tpe match {
+ case SAMType(sam) =>
+ val implType = meth.tpe.widen
+
+ val List(implParamTypes) = implType.paramTypess
+ val List(samParamTypes) = sam.info.paramTypess
+ val implResultType = implType.resultType
+ val samResultType = sam.info.resultType
+
+ // Given a value class V with an underlying type U, the following code:
+ // val f: Function1[V, V] = x => ...
+ // results in the creation of a closure and a method:
+ // def $anonfun(v1: V): V = ...
+ // val f: Function1[V, V] = closure($anonfun)
+ // After [[Erasure]] this method will look like:
+ // def $anonfun(v1: ErasedValueType(V, U)): ErasedValueType(V, U) = ...
+ // And after [[ElimErasedValueType]] it will look like:
+ // def $anonfun(v1: U): U = ...
+ // This method does not implement the SAM of Function1[V, V] anymore and
+ // needs to be replaced by a bridge:
+ // def $anonfun$2(v1: V): V = new V($anonfun(v1.underlying))
+ // val f: Function1 = closure($anonfun$2)
+ // In general, a bridge is needed when the signature of the closure method after
+ // Erasure contains an ErasedValueType but the corresponding type in the functional
+ // interface is not an ErasedValueType.
+ val bridgeNeeded =
+ (implResultType :: implParamTypes, samResultType :: samParamTypes).zipped.exists(
+ (implType, samType) => implType.isErasedValueType && !samType.isErasedValueType
+ )
+
+ if (bridgeNeeded) {
+ val bridge = ctx.newSymbol(ctx.owner, nme.ANON_FUN, Flags.Synthetic | Flags.Method, sam.info)
+ val bridgeCtx = ctx.withOwner(bridge)
+ Closure(bridge, bridgeParamss => {
+ implicit val ctx: Context = bridgeCtx
+
+ val List(bridgeParams) = bridgeParamss
+ val rhs = Apply(meth, (bridgeParams, implParamTypes).zipped.map(adapt(_, _)))
+ adapt(rhs, sam.info.resultType)
+ })
+ } else implClosure
+ case _ =>
+ implClosure
+ }
+ }
+
+ override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(implicit ctx: Context) =
+ EmptyTree
+
+ override def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(implicit ctx: Context): List[Tree] = {
+ val stats1 = Trees.flatten(super.typedStats(stats, exprOwner))
+ if (ctx.owner.isClass) stats1 ::: addBridges(stats, stats1)(ctx) else stats1
+ }
+
+ // this implementation doesn't check for bridge clashes with value types!
+ def addBridges(oldStats: List[untpd.Tree], newStats: List[tpd.Tree])(implicit ctx: Context): List[tpd.Tree] = {
+ val beforeCtx = ctx.withPhase(ctx.erasurePhase)
+ def traverse(after: List[Tree], before: List[untpd.Tree],
+ emittedBridges: ListBuffer[tpd.DefDef] = ListBuffer[tpd.DefDef]()): List[tpd.DefDef] = {
+ after match {
+ case Nil => emittedBridges.toList
+ case (member: DefDef) :: newTail =>
+ before match {
+ case Nil => emittedBridges.toList
+ case (oldMember: untpd.DefDef) :: oldTail =>
+ try {
+ val oldSymbol = oldMember.symbol(beforeCtx)
+ val newSymbol = member.symbol(ctx)
+ assert(oldSymbol.name(beforeCtx) == newSymbol.name,
+ s"${oldSymbol.name(beforeCtx)} bridging with ${newSymbol.name}")
+ val newOverridden = oldSymbol.denot.allOverriddenSymbols.toSet // TODO: clarify new <-> old in a comment; symbols are swapped here
+ val oldOverridden = newSymbol.allOverriddenSymbols(beforeCtx).toSet // TODO: can we find a more efficient impl? newOverridden does not have to be a set!
+ def stillInBaseClass(sym: Symbol) = ctx.owner derivesFrom sym.owner
+ val neededBridges = (oldOverridden -- newOverridden).filter(stillInBaseClass)
+
+ var minimalSet = Set[Symbol]()
+ // compute minimal set of bridges that are needed:
+ for (bridge <- neededBridges) {
+ val isRequired = minimalSet.forall(nxtBridge => !(bridge.info =:= nxtBridge.info))
+
+ if (isRequired) {
+ // check for clashes
+ val clash: Option[Symbol] = oldSymbol.owner.info.decls.lookupAll(bridge.name).find {
+ sym =>
+ (sym.name eq bridge.name) && sym.info.widen =:= bridge.info.widen
+ }.orElse(
+ emittedBridges.find(stat => (stat.name == bridge.name) && stat.tpe.widen =:= bridge.info.widen)
+ .map(_.symbol))
+ clash match {
+ case Some(cl) =>
+ ctx.error(i"bridge for method ${newSymbol.showLocated(beforeCtx)} of type ${newSymbol.info(beforeCtx)}\n" +
+ i"clashes with ${cl.symbol.showLocated(beforeCtx)} of type ${cl.symbol.info(beforeCtx)}\n" +
+ i"both have same type after erasure: ${bridge.symbol.info}")
+ case None => minimalSet += bridge
+ }
+ }
+ }
+
+ val bridgeImplementations = minimalSet.map {
+ sym => makeBridgeDef(member, sym)(ctx)
+ }
+ emittedBridges ++= bridgeImplementations
+ } catch {
+ case ex: MergeError => ctx.error(ex.getMessage, member.pos)
+ }
+
+ traverse(newTail, oldTail, emittedBridges)
+ case notADefDef :: oldTail =>
+ traverse(after, oldTail, emittedBridges)
+ }
+ case notADefDef :: newTail =>
+ traverse(newTail, before, emittedBridges)
+ }
+ }
+
+ traverse(newStats, oldStats)
+ }
+
+ private final val NoBridgeFlags = Flags.Accessor | Flags.Deferred | Flags.Lazy | Flags.ParamAccessor
+
+ /** Create a bridge DefDef which overrides a parent method.
+ *
+ * @param newDef The DefDef which needs bridging because its signature
+ * does not match the parent method signature
+ * @param parentSym A symbol corresponding to the parent method to override
+ * @return A new DefDef whose signature matches the parent method
+ * and whose body only contains a call to newDef
+ */
+ def makeBridgeDef(newDef: tpd.DefDef, parentSym: Symbol)(implicit ctx: Context): tpd.DefDef = {
+ val newDefSym = newDef.symbol
+ val currentClass = newDefSym.owner.asClass
+
+ def error(reason: String) = {
+ assert(false, s"failure creating bridge from ${newDefSym} to ${parentSym}, reason: $reason")
+ ???
+ }
+ var excluded = NoBridgeFlags
+ if (!newDefSym.is(Flags.Protected)) excluded |= Flags.Protected // needed to avoid "weaker access" assertion failures in expandPrivate
+ val bridge = ctx.newSymbol(currentClass,
+ parentSym.name, parentSym.flags &~ excluded | Flags.Bridge, parentSym.info, coord = newDefSym.owner.coord).asTerm
+ bridge.enteredAfter(ctx.phase.prev.asInstanceOf[DenotTransformer]) // this should be safe, as we're executing in context of next phase
+ ctx.debuglog(s"generating bridge from ${newDefSym} to $bridge")
+
+ val sel: Tree = This(currentClass).select(newDefSym.termRef)
+
+ val resultType = parentSym.info.widen.resultType
+
+ val bridgeCtx = ctx.withOwner(bridge)
+
+ tpd.DefDef(bridge, { paramss: List[List[tpd.Tree]] =>
+ implicit val ctx: Context = bridgeCtx
+
+ val rhs = paramss.foldLeft(sel)((fun, vparams) =>
+ fun.tpe.widen match {
+ case MethodType(names, types) => Apply(fun, (vparams, types).zipped.map(adapt(_, _, untpd.EmptyTree)))
+ case a => error(s"can not resolve apply type $a")
+
+ })
+ adapt(rhs, resultType)
+ })
+ }
+
+ override def adapt(tree: Tree, pt: Type, original: untpd.Tree)(implicit ctx: Context): Tree =
+ ctx.traceIndented(i"adapting ${tree.showSummary}: ${tree.tpe} to $pt", show = true) {
+ assert(ctx.phase == ctx.erasurePhase.next, ctx.phase)
+ if (tree.isEmpty) tree
+ else if (ctx.mode is Mode.Pattern) tree // TODO: replace with assertion once pattern matcher is active
+ else adaptToType(tree, pt)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
new file mode 100644
index 000000000..83cd395ff
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
@@ -0,0 +1,111 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.DenotTransformers.{SymTransformer, IdentityDenotTransformer}
+import Contexts.Context
+import Symbols._
+import Scopes._
+import Flags._
+import StdNames._
+import SymDenotations._
+import Types._
+import collection.mutable
+import TreeTransforms._
+import Decorators._
+import ast.Trees._
+import TreeTransforms._
+import java.io.File.separatorChar
+import ValueClasses._
+
+/** Make private term members that are accessed from another class
+ * non-private by resetting the Private flag and expanding their name.
+ *
+ * Make private accessor in value class not-private. This is necessary to unbox
+ * the value class when accessing it from separate compilation units
+ *
+ * Also, make non-private any private parameter forwarders that forward to an inherited
+ * public or protected parameter accessor with the same name as the forwarder.
+ * This is necessary since private methods are not allowed to have the same name
+ * as inherited public ones.
+ *
+ * See discussion in https://github.com/lampepfl/dotty/pull/784
+ * and https://github.com/lampepfl/dotty/issues/783
+ */
+class ExpandPrivate extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "expandPrivate"
+
+ /** Postcondition: no definition may end up with weaker access than a definition it overrides. */
+ override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
+ tree match {
+ case t: DefDef =>
+ val sym = t.symbol
+ def hasWeakerAccess(other: Symbol) = {
+ // public > protected > /* default */ > private
+ if (sym.is(Private)) other.is(Private)
+ else if (sym.is(Protected)) other.is(Protected | Private)
+ else true // sym is public
+ }
+ val fail = sym.allOverriddenSymbols.findSymbol(x => !hasWeakerAccess(x))
+ if (fail.exists) {
+ assert(false, i"${sym.showFullName}: ${sym.info} has weaker access than superclass method ${fail.showFullName}: ${fail.info}")
+ }
+ case _ =>
+ }
+ }
+
+ private def isVCPrivateParamAccessor(d: SymDenotation)(implicit ctx: Context) =
+ d.isTerm && d.is(PrivateParamAccessor) && isDerivedValueClass(d.owner)
+
+ /** Make private terms accessed from different classes non-private.
+ * Note: this happens also for accesses between class and linked module class.
+ * If we change the scheme at one point to make static module class computations
+ * static members of the companion class, we should tighten the condition below.
+ */
+ private def ensurePrivateAccessible(d: SymDenotation)(implicit ctx: Context) =
+ if (isVCPrivateParamAccessor(d))
+ d.ensureNotPrivate.installAfter(thisTransform)
+ else if (d.is(PrivateTerm) && d.owner != ctx.owner.enclosingClass) {
+ // Paths `p1` and `p2` are similar if they have a common suffix that follows
+ // possibly different directory paths. That is, their common suffix extends
+ // in both cases either to the start of the path or to a file separator character.
+ def isSimilar(p1: String, p2: String): Boolean = {
+ var i = p1.length - 1
+ var j = p2.length - 1
+ while (i >= 0 && j >= 0 && p1(i) == p2(j) && p1(i) != separatorChar) {
+ i -= 1
+ j -= 1
+ }
+ (i < 0 || p1(i) == separatorChar) &&
+ (j < 0 || p2(j) == separatorChar) // fix: was p1(j) — indexed the wrong string with p2's cursor (also out of bounds when p1 is shorter than p2)
+ }
+ assert(isSimilar(d.symbol.sourceFile.path, ctx.source.file.path),
+ i"private ${d.symbol.showLocated} in ${d.symbol.sourceFile} accessed from ${ctx.owner.showLocated} in ${ctx.source.file}")
+ d.ensureNotPrivate.installAfter(thisTransform)
+ }
+
+ override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) = {
+ ensurePrivateAccessible(tree.symbol)
+ tree
+ }
+
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) = {
+ ensurePrivateAccessible(tree.symbol)
+ tree
+ }
+
+ override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
+ val sym = tree.symbol
+ tree.rhs match {
+ case Apply(sel @ Select(_: Super, _), _)
+ if sym.is(PrivateParamAccessor) && sel.symbol.is(ParamAccessor) && sym.name == sel.symbol.name =>
+ // Private parameter forwarder to an inherited param accessor with the same
+ // name: must become non-private (see class comment).
+ sym.ensureNotPrivate.installAfter(thisTransform)
+ case _ =>
+ if (isVCPrivateParamAccessor(sym))
+ sym.ensureNotPrivate.installAfter(thisTransform)
+ }
+ tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
new file mode 100644
index 000000000..91399f91a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -0,0 +1,86 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._
+import SymDenotations.SymDenotation
+import TreeTransforms._
+import SymUtils._
+import ast.untpd
+import ast.Trees._
+
+/** Expand SAM closures that cannot be represented by the JVM as lambdas to anonymous classes.
+ * These fall into five categories
+ *
+ * 1. Partial function closures, we need to generate a isDefinedAt method for these.
+ * 2. Closures implementing non-trait classes.
+ * 3. Closures implementing classes that inherit from a class other than Object
+ * (a lambda cannot be a run-time subtype of such a class)
+ * 4. Closures that implement traits which run initialization code.
+ * 5. Closures that get synthesized abstract methods in the transformation pipeline. These methods can be
+ * (1) superaccessors, (2) outer references, (3) accessors for fields.
+ */
+class ExpandSAMs extends MiniPhaseTransform { thisTransformer =>
+ override def phaseName = "expandSAMs"
+
+ import ast.tpd._
+
+ /** Is the SAMType `cls` also a SAM under the rules of the platform? */
+ def isPlatformSam(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ ctx.platform.isSam(cls)
+
+ /** Rewrite closure blocks whose target type is a SAM type that the platform
+  * cannot represent as a lambda into anonymous-class instantiations;
+  * PartialFunction closures additionally get a synthesized `isDefinedAt`.
+  */
+ override def transformBlock(tree: Block)(implicit ctx: Context, info: TransformerInfo): Tree = tree match {
+ case Block(stats @ (fn: DefDef) :: Nil, Closure(_, fnRef, tpt)) if fnRef.symbol == fn.symbol =>
+ tpt.tpe match {
+ case NoType => tree // it's a plain function
+ case tpe @ SAMType(_) if tpe.isRef(defn.PartialFunctionClass) =>
+ toPartialFunction(tree)
+ case tpe @ SAMType(_) if isPlatformSam(tpe.classSymbol.asClass) =>
+ tree
+ case tpe =>
+ // Exactly one abstract term member (modulo superaccessors) is expected of a SAM type.
+ val Seq(samDenot) = tpe.abstractTermMembers.filter(!_.symbol.is(SuperAccessor))
+ cpy.Block(tree)(stats,
+ AnonClass(tpe :: Nil, fn.symbol.asTerm :: Nil, samDenot.symbol.asTerm.name :: Nil))
+ }
+ case _ =>
+ tree
+ }
+
+ /** Expand a PartialFunction closure into an anonymous class with the original
+  * `apply` plus an `isDefinedAt` derived from the match in `apply`'s body
+  * (each case answers `true`; a default case answers `false`).
+  */
+ private def toPartialFunction(tree: Block)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val Block(
+ (applyDef @ DefDef(nme.ANON_FUN, Nil, List(List(param)), _, _)) :: Nil,
+ Closure(_, _, tpt)) = tree
+ val applyRhs: Tree = applyDef.rhs
+ val applyFn = applyDef.symbol.asTerm
+
+ val MethodType(paramNames, paramTypes) = applyFn.info
+ val isDefinedAtFn = applyFn.copy(
+ name = nme.isDefinedAt,
+ flags = Synthetic | Method,
+ info = MethodType(paramNames, paramTypes, defn.BooleanType)).asTerm
+ val tru = Literal(Constant(true))
+ def isDefinedAtRhs(paramRefss: List[List[Tree]]) = applyRhs match {
+ case Match(selector, cases) =>
+ assert(selector.symbol == param.symbol)
+ val paramRef = paramRefss.head.head
+ // Again, the alternative
+ // val List(List(paramRef)) = paramRefs
+ // fails with a similar self instantiation error
+ def translateCase(cdef: CaseDef): CaseDef =
+ cpy.CaseDef(cdef)(body = tru).changeOwner(applyFn, isDefinedAtFn)
+ val defaultSym = ctx.newSymbol(isDefinedAtFn, nme.WILDCARD, Synthetic, selector.tpe.widen)
+ val defaultCase =
+ CaseDef(
+ Bind(defaultSym, Underscore(selector.tpe.widen)),
+ EmptyTree,
+ Literal(Constant(false)))
+ // @unchecked: the rewritten match is exhaustive by construction (default case added).
+ val annotated = Annotated(paramRef, New(ref(defn.UncheckedAnnotType)))
+ cpy.Match(applyRhs)(annotated, cases.map(translateCase) :+ defaultCase)
+ case _ =>
+ // Body is not a match: the function is total, so isDefinedAt is constantly true.
+ tru
+ }
+ val isDefinedAtDef = transformFollowingDeep(DefDef(isDefinedAtFn, isDefinedAtRhs(_)))
+ val anonCls = AnonClass(tpt.tpe :: Nil, List(applyFn, isDefinedAtFn), List(nme.apply, nme.isDefinedAt))
+ cpy.Block(tree)(List(applyDef, isDefinedAtDef), anonCls)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
new file mode 100644
index 000000000..3fec47e9f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -0,0 +1,362 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import core.StdNames.nme
+import core.Names._
+import core.NameOps._
+import ast.Trees._
+import SymUtils._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Phases.Phase
+import util.Property
+import collection.mutable
+
+/** This phase adds outer accessors to classes and traits that need them.
+ * Compared to Scala 2.x, it tries to minimize the set of classes
+ * that take outer accessors by scanning class implementations for
+ * outer references.
+ *
+ * The following things are delayed until erasure and are performed
+ * by class OuterOps:
+ *
+ * - add outer parameters to constructors
+ * - pass outer arguments in constructor calls
+ *
+ * replacement of outer this by outer paths is done in Erasure.
+ * needs to run after pattern matcher as it can add outer checks and force creation of $outer
+ */
+class ExplicitOuter extends MiniPhaseTransform with InfoTransformer { thisTransformer =>
+ import ExplicitOuter._
+ import ast.tpd._
+
+ // NOTE(review): this property key appears unused within this file — confirm
+ // whether another phase attaches/reads it before removing.
+ val Outer = new Property.Key[Tree]
+
+ override def phaseName: String = "explicitOuter"
+
+ /** List of names of phases that should have finished their processing of all compilation units
+ * before this phase starts
+ */
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[PatternMatcher])
+
+ /** Add outer accessors if a class always needs an outer pointer */
+ override def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context) = tp match {
+ case tp @ ClassInfo(_, cls, _, decls, _) if needsOuterAlways(cls) && !sym.is(JavaDefined) =>
+ val newDecls = decls.cloneScope
+ newOuterAccessors(cls).foreach(newDecls.enter)
+ tp.derivedClassInfo(decls = newDecls)
+ case _ =>
+ tp
+ }
+
+ override def mayChange(sym: Symbol)(implicit ctx: Context): Boolean = sym.isClass
+
+ /** Convert a selection of the form `qual.C_<OUTER>` to an outer path from `qual` to `C` */
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) =
+ if (tree.name.isOuterSelect)
+ outer.path(tree.tpe.widen.classSymbol, tree.qualifier).ensureConforms(tree.tpe)
+ else tree
+
+ /** First, add outer accessors if a class does not have them yet and it references an outer this.
+ * If the class has outer accessors, implement them.
+ * Furthermore, if a parent trait might have an outer accessor,
+ * provide an implementation for the outer accessor by computing the parent's
+ * outer from the parent type prefix. If the trait ends up not having an outer accessor
+ * after all, the implementation is redundant, but does not harm.
+ * The same logic is not done for non-trait parent classes because for them the outer
+ * pointer is passed in the super constructor, which will be implemented later in
+ * a separate phase which needs to run after erasure. However, we make sure here
+ * that the super class constructor is indeed a New, and not just a type.
+ */
+ override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val cls = ctx.owner.asClass
+ val isTrait = cls.is(Trait)
+ if (needsOuterIfReferenced(cls) &&
+ !needsOuterAlways(cls) &&
+ impl.existsSubTree(referencesOuter(cls, _)))
+ ensureOuterAccessors(cls)
+ if (hasOuter(cls)) {
+ val newDefs = new mutable.ListBuffer[Tree]
+ if (isTrait)
+ // In traits the accessor stays abstract; subclasses implement it.
+ newDefs += DefDef(outerAccessor(cls).asTerm, EmptyTree)
+ else {
+ val outerParamAcc = outerParamAccessor(cls)
+ newDefs += ValDef(outerParamAcc, EmptyTree)
+ newDefs += DefDef(outerAccessor(cls).asTerm, ref(outerParamAcc))
+ }
+
+ for (parentTrait <- cls.mixins) {
+ if (needsOuterIfReferenced(parentTrait)) {
+ val parentTp = cls.denot.thisType.baseTypeRef(parentTrait)
+ val outerAccImpl = newOuterAccessor(cls, parentTrait).enteredAfter(thisTransformer)
+ newDefs += DefDef(outerAccImpl, singleton(outerPrefix(parentTp)))
+ }
+ }
+
+ val parents1 =
+ for (parent <- impl.parents) yield {
+ val parentCls = parent.tpe.classSymbol.asClass
+ if (parentCls.is(Trait)) {
+ parent
+ }
+ else parent match { // ensure class parent is a constructor
+ case parent: TypeTree => New(parent.tpe, Nil).withPos(impl.pos)
+ case _ => parent
+ }
+ }
+ cpy.Template(impl)(parents = parents1, body = impl.body ++ newDefs)
+ }
+ else impl
+ }
+
+ /** A closure whose SAM class needs an outer pointer cannot be emitted as a lambda. */
+ override def transformClosure(tree: Closure)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ if (tree.tpt ne EmptyTree) {
+ val cls = tree.tpt.asInstanceOf[TypeTree].tpe.classSymbol
+ if (cls.exists && hasOuter(cls.asClass))
+ ctx.error("Not a single abstract method type, requires an outer pointer", tree.pos)
+ }
+ tree
+ }
+}
+
+object ExplicitOuter {
+ import ast.tpd._
+
+ /** Ensure that class `cls` has outer accessors */
+ def ensureOuterAccessors(cls: ClassSymbol)(implicit ctx: Context): Unit = {
+ //todo: implementing #165 would simplify this logic
+ val prevPhase = ctx.phase.prev
+ assert(prevPhase.id <= ctx.explicitOuterPhase.id, "can add $outer symbols only before ExplicitOuter")
+ assert(prevPhase.isInstanceOf[DenotTransformer], "adding outerAccessors requires being DenotTransformer")
+ if (!hasOuter(cls)) {
+ newOuterAccessors(cls).foreach(_.enteredAfter(prevPhase.asInstanceOf[DenotTransformer]))
+ }
+ }
+
+ /** The outer accessor and potentially outer param accessor needed for class `cls` */
+ private def newOuterAccessors(cls: ClassSymbol)(implicit ctx: Context) =
+ newOuterAccessor(cls, cls) :: (if (cls is Trait) Nil else newOuterParamAccessor(cls) :: Nil)
+
+ /** A new outer accessor or param accessor */
+ private def newOuterSym(owner: ClassSymbol, cls: ClassSymbol, name: TermName, flags: FlagSet)(implicit ctx: Context) = {
+ val target = cls.owner.enclosingClass.typeRef
+ val info = if (flags.is(Method)) ExprType(target) else target
+ ctx.newSymbol(owner, name, Synthetic | flags, info, coord = cls.coord)
+ }
+
+ /** A new param accessor for the outer field in class `cls` */
+ private def newOuterParamAccessor(cls: ClassSymbol)(implicit ctx: Context) =
+ newOuterSym(cls, cls, nme.OUTER, Private | ParamAccessor)
+
+ /** A new outer accessor for class `cls` which is a member of `owner` */
+ private def newOuterAccessor(owner: ClassSymbol, cls: ClassSymbol)(implicit ctx: Context) = {
+ val deferredIfTrait = if (owner.is(Trait)) Deferred else EmptyFlags
+ val outerAccIfOwn = if (owner == cls) OuterAccessor else EmptyFlags
+ newOuterSym(owner, cls, outerAccName(cls),
+ Final | Method | Stable | outerAccIfOwn | deferredIfTrait)
+ }
+
+ /** The expanded name of the outer accessor for class `cls` */
+ private def outerAccName(cls: ClassSymbol)(implicit ctx: Context): TermName =
+ nme.OUTER.expandedName(cls)
+
+ /** Class needs an outer pointer, provided there is a reference to an outer this in it. */
+ def needsOuterIfReferenced(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ !(cls.isStatic ||
+ cls.owner.enclosingClass.isStaticOwner ||
+ cls.is(PureInterface)
+ )
+
+ /** Class unconditionally needs an outer pointer. This is the case if
+ * the class needs an outer pointer if referenced and one of the following holds:
+ * - we might not know at all instantiation sites whether outer is referenced or not
+ * - we need to potentially pass along outer to a parent class or trait
+ */
+ private def needsOuterAlways(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ needsOuterIfReferenced(cls) &&
+ (!hasLocalInstantiation(cls) || // needs outer because we might not know whether outer is referenced or not
+ cls.classInfo.parents.exists(parent => // needs outer to potentially pass along to parent
+ needsOuterIfReferenced(parent.classSymbol.asClass)))
+
+ /** Class is always instantiated in the compilation unit where it is defined */
+ private def hasLocalInstantiation(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ // scala2x modules always take an outer pointer(as of 2.11)
+ // dotty modules are always locally instantiated
+ cls.owner.isTerm || cls.is(Private) || cls.is(Module, butNot = Scala2x)
+
+ /** The outer parameter accessor of class `cls` */
+ private def outerParamAccessor(cls: ClassSymbol)(implicit ctx: Context): TermSymbol =
+ cls.info.decl(nme.OUTER).symbol.asTerm
+
+ /** The outer accessor of class `cls`. To find it is a bit tricky. The
+ * class might have been moved with new owners between ExplicitOuter and Erasure,
+ * where the method is also called. For instance, it might have been part
+ * of a by-name argument, and therefore be moved under a closure method
+ * by ElimByName. In that case looking up the method again at Erasure with the
+ * fully qualified name `outerAccName` will fail, because the `outerAccName`'s
+ * result is phase dependent. In that case we use a backup strategy where we search all
+ * definitions in the class to find the one with the OuterAccessor flag.
+ */
+ def outerAccessor(cls: ClassSymbol)(implicit ctx: Context): Symbol =
+ if (cls.isStatic) NoSymbol // fast return to avoid scanning package decls
+ else cls.info.member(outerAccName(cls)).suchThat(_ is OuterAccessor).symbol orElse
+ cls.info.decls.find(_ is OuterAccessor).getOrElse(NoSymbol)
+
+ /** Class has an outer accessor. Can be called only after phase ExplicitOuter. */
+ private def hasOuter(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ needsOuterIfReferenced(cls) && outerAccessor(cls).exists
+
+ /** Class constructor takes an outer argument. Can be called only after phase ExplicitOuter. */
+ private def hasOuterParam(cls: ClassSymbol)(implicit ctx: Context): Boolean =
+ !cls.is(Trait) && needsOuterIfReferenced(cls) && outerAccessor(cls).exists
+
+ /** Tree references an outer class of `cls` which is not a static owner.
+ */
+ def referencesOuter(cls: Symbol, tree: Tree)(implicit ctx: Context): Boolean = {
+ def isOuterSym(sym: Symbol) =
+ !sym.isStaticOwner && cls.isProperlyContainedIn(sym)
+ def isOuterRef(ref: Type): Boolean = ref match {
+ case ref: ThisType =>
+ isOuterSym(ref.cls)
+ case ref: TermRef =>
+ if (ref.prefix ne NoPrefix)
+ !ref.symbol.isStatic && isOuterRef(ref.prefix)
+ else (
+ (ref.symbol is Hoistable) &&
+ // ref.symbol will be placed in enclosing class scope by LambdaLift, so it might need
+ // an outer path then.
+ isOuterSym(ref.symbol.owner.enclosingClass)
+ ||
+ // If not hoistable, ref.symbol will get a proxy in immediately enclosing class. If this properly
+ // contains the current class, it needs an outer path.
+ // If the symbol is hoistable, it might have free variables for which the same
+ // reasoning applies. See pos/i1664.scala
+ ctx.owner.enclosingClass.owner.enclosingClass.isContainedIn(ref.symbol.owner)
+ )
+ case _ => false
+ }
+ def hasOuterPrefix(tp: Type) = tp match {
+ case TypeRef(prefix, _) => isOuterRef(prefix)
+ case _ => false
+ }
+ tree match {
+ case _: This | _: Ident => isOuterRef(tree.tpe)
+ case nw: New =>
+ val newCls = nw.tpe.classSymbol
+ isOuterSym(newCls.owner.enclosingClass) ||
+ hasOuterPrefix(nw.tpe) ||
+ newCls.owner.isTerm && cls.isProperlyContainedIn(newCls)
+ // newCls might get proxies for free variables. If current class is
+ // properly contained in newCls, it needs an outer path to newCls access the
+ // proxies and forward them to the new instance.
+ case _ =>
+ false
+ }
+ }
+
+ // Symbols that LambdaLift may hoist into the enclosing class scope.
+ private final val Hoistable = Method | Lazy | Module
+
+ /** The outer prefix implied by type `tpe` */
+ private def outerPrefix(tpe: Type)(implicit ctx: Context): Type = tpe match {
+ case tpe: TypeRef =>
+ tpe.symbol match {
+ case cls: ClassSymbol =>
+ if (tpe.prefix eq NoPrefix) cls.owner.enclosingClass.thisType
+ else tpe.prefix
+ case _ =>
+ outerPrefix(tpe.underlying)
+ }
+ case tpe: TypeProxy =>
+ outerPrefix(tpe.underlying)
+ }
+
+ def outer(implicit ctx: Context): OuterOps = new OuterOps(ctx)
+
+ /** The operations in this class
+ * - add outer parameters
+ * - pass outer arguments to these parameters
+ * - replace outer this references by outer paths.
+ * They are called from erasure. There are two constraints which
+ * suggest these operations should be done in erasure.
+ * - Replacing this references with outer paths loses aliasing information,
+ * so programs will not typecheck with unerased types unless a lot of type
+ * refinements are added. Therefore, outer paths should be computed no
+ * earlier than erasure.
+ * - outer parameters should not show up in signatures, so again
+ * they cannot be added before erasure.
+ * - outer arguments need access to outer parameters as well as to the
+ * original type prefixes of types in New expressions. These prefixes
+ * get erased during erasure. Therefore, outer arguments have to be passed
+ * no later than erasure.
+ */
+ class OuterOps(val ictx: Context) extends AnyVal {
+ private implicit def ctx: Context = ictx
+
+ /** If `cls` has an outer parameter add one to the method type `tp`. */
+ def addParam(cls: ClassSymbol, tp: Type): Type =
+ if (hasOuterParam(cls)) {
+ val mt @ MethodType(pnames, ptypes) = tp
+ mt.derivedMethodType(
+ nme.OUTER :: pnames, cls.owner.enclosingClass.typeRef :: ptypes, mt.resultType)
+ } else tp
+
+ /** If function in an apply node is a constructor that needs to be passed an
+ * outer argument, the singleton list with the argument, otherwise Nil.
+ */
+ def args(fun: Tree): List[Tree] = {
+ if (fun.symbol.isConstructor) {
+ val cls = fun.symbol.owner.asClass
+ def outerArg(receiver: Tree): Tree = receiver match {
+ case New(_) | Super(_, _) =>
+ singleton(outerPrefix(receiver.tpe))
+ case This(_) =>
+ ref(outerParamAccessor(cls)) // will be rewired to outer argument of secondary constructor in phase Constructors
+ case TypeApply(Select(r, nme.asInstanceOf_), args) =>
+ outerArg(r) // cast was inserted, skip
+ }
+ if (hasOuterParam(cls))
+ methPart(fun) match {
+ case Select(receiver, _) => outerArg(receiver).withPos(fun.pos) :: Nil
+ }
+ else Nil
+ } else Nil
+ }
+
+ /** The path of outer accessors that references `toCls.this` starting from
+ * the context owner's this node.
+ */
+ def path(toCls: Symbol, start: Tree = This(ctx.owner.enclosingClass.asClass)): Tree = try {
+ def loop(tree: Tree): Tree = {
+ val treeCls = tree.tpe.widen.classSymbol
+ val outerAccessorCtx = ctx.withPhaseNoLater(ctx.lambdaLiftPhase) // lambdalift mangles local class names, which means we cannot reliably find outer accessors anymore
+ ctx.log(i"outer to $toCls of $tree: ${tree.tpe}, looking for ${outerAccName(treeCls.asClass)(outerAccessorCtx)} in $treeCls")
+ if (treeCls == toCls) tree
+ else {
+ val acc = outerAccessor(treeCls.asClass)(outerAccessorCtx)
+ assert(acc.exists,
+ i"failure to construct path from ${ctx.owner.ownersIterator.toList}%/% to `this` of ${toCls.showLocated};\n${treeCls.showLocated} does not have an outer accessor")
+ loop(tree.select(acc).ensureApplied)
+ }
+ }
+ ctx.log(i"computing outerpath to $toCls from ${ctx.outersIterator.map(_.owner).toList}")
+ loop(start)
+ } catch {
+ case ex: ClassCastException =>
+ throw new ClassCastException(i"no path exists from ${ctx.owner.enclosingClass} to $toCls")
+ }
+
+ /** The outer parameter definition of a constructor if it needs one */
+ def paramDefs(constr: Symbol): List[ValDef] =
+ if (constr.isConstructor && hasOuterParam(constr.owner.asClass)) {
+ val MethodType(outerName :: _, outerType :: _) = constr.info
+ val outerSym = ctx.newSymbol(constr, outerName, Param, outerType)
+ ValDef(outerSym) :: Nil
+ }
+ else Nil
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
new file mode 100644
index 000000000..7bb65e575
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
@@ -0,0 +1,47 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Contexts.Context
+import Types._
+import TreeTransforms._
+import Decorators._
+import ast.Trees._
+import Flags._
+
+/** Transform references of the form
+ *
+ * C.this.m
+ *
+ * where `C` is a class with explicit self type and `C` is not a
+ * subclass of the owner of `m` to
+ *
+ * C.this.asInstanceOf[S & C.this.type].m
+ *
+ * where `S` is the self type of `C`.
+ * See run/i789.scala for a test case why this is needed.
+ *
+ * Also replaces idents referring to the self type with ThisTypes.
+ */
+class ExplicitSelf extends MiniPhaseTransform { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName = "explicitSelf"
+
+ /** Replace an ident whose type is a ThisType with an explicit `This` node. */
+ override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) = tree.tpe match {
+ case tp: ThisType =>
+ ctx.debuglog(s"owner = ${ctx.owner}, context = ${ctx}")
+ This(tp.cls) withPos tree.pos
+ case _ => tree
+ }
+
+ /** Insert a cast to `S & C.this.type` on `C.this.m` when `C` has an explicit
+  * self type `S` and does not derive from `m`'s owner (see class comment).
+  */
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree = tree match {
+ case Select(thiz: This, name) if name.isTermName =>
+ val cls = thiz.symbol.asClass
+ val cinfo = cls.classInfo
+ if (cinfo.givenSelfType.exists && !cls.derivesFrom(tree.symbol.owner))
+ cpy.Select(tree)(thiz.asInstance(AndType(cinfo.selfType, thiz.tpe)), name)
+ else tree
+ case _ => tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
new file mode 100644
index 000000000..5ae4e8a54
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -0,0 +1,243 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.transform.TreeTransforms._
+import ValueClasses._
+import dotty.tools.dotc.ast.{Trees, tpd}
+import scala.collection.{ mutable, immutable }
+import mutable.ListBuffer
+import core._
+import dotty.tools.dotc.core.Phases.{NeedsCompanions, Phase}
+import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
+import TypeErasure.{ valueErasure, ErasedValueType }
+import TypeUtils._
+import util.Positions._
+import Decorators._
+import SymUtils._
+
+/**
+ * Perform Step 1 in the inline classes SIP: Creates extension methods for all
+ * methods in a value class, except parameter or super accessors, or constructors.
+ *
+ * Additionally, for a value class V, let U be the underlying type after erasure. We add
+ * to the companion module of V two cast methods:
+ * def u2evt$(x0: U): ErasedValueType(V, U)
+ * def evt2u$(x0: ErasedValueType(V, U)): U
+ * The casts are used in [[Erasure]] to make it typecheck, they are then removed
+ * in [[ElimErasedValueType]].
+ * This is different from the implementation of value classes in Scala 2
+ * (see SIP-15) which uses `asInstanceOf` which does not typecheck.
+ *
+ * Finally, if the constructor of a value class is private or protected
+ * it is widened to public.
+ */
+class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with FullParameterization { thisTransformer =>
+
+ import tpd._
+ import ExtensionMethods._
+
+ /** the following two members override abstract members in Transform */
+ override def phaseName: String = "extmethods"
+
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[ElimRepeated])
+
+ override def runsAfterGroupsOf = Set(classOf[FirstTransform]) // need companion objects to exist
+
+ override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref match {
+ case moduleClassSym: ClassDenotation if moduleClassSym is ModuleClass =>
+ moduleClassSym.linkedClass match {
+ case valueClass: ClassSymbol if isDerivedValueClass(valueClass) =>
+ val cinfo = moduleClassSym.classInfo
+ val decls1 = cinfo.decls.cloneScope
+ val moduleSym = moduleClassSym.symbol.asClass
+
+ var newSuperClass: Type = null
+
+ ctx.atPhase(thisTransformer.next) { implicit ctx =>
+ // In Scala 2, extension methods are added before pickling so we should
+ // not generate them again.
+ if (!(valueClass is Scala2x)) ctx.atPhase(thisTransformer) { implicit ctx =>
+ for (decl <- valueClass.classInfo.decls) {
+ if (isMethodWithExtension(decl))
+ decls1.enter(createExtensionMethod(decl, moduleClassSym.symbol))
+ }
+ }
+
+ val underlying = valueErasure(underlyingOfValueClass(valueClass))
+ val evt = ErasedValueType(valueClass.typeRef, underlying)
+ val u2evtSym = ctx.newSymbol(moduleSym, nme.U2EVT, Synthetic | Method,
+ MethodType(List(nme.x_0), List(underlying), evt))
+ val evt2uSym = ctx.newSymbol(moduleSym, nme.EVT2U, Synthetic | Method,
+ MethodType(List(nme.x_0), List(evt), underlying))
+
+ val defn = ctx.definitions
+
+ val underlyingCls = underlying.classSymbol
+ val underlyingClsName =
+ if (underlyingCls.isNumericValueClass || underlyingCls == defn.BooleanClass) underlyingCls.name
+ else nme.Object
+
+ val syp = ctx.requiredClass(s"dotty.runtime.vc.VC${underlyingClsName}Companion").asClass
+
+ newSuperClass = tpd.ref(syp).select(nme.CONSTRUCTOR).appliedToType(valueClass.typeRef).tpe.resultType
+
+ decls1.enter(u2evtSym)
+ decls1.enter(evt2uSym)
+ }
+
+ // Add the extension methods, the cast methods u2evt$ and evt2u$, and a VC*Companion superclass
+ moduleClassSym.copySymDenotation(info =
+ cinfo.derivedClassInfo(
+ // FIXME: use of VC*Companion superclasses is disabled until the conflicts with SyntheticMethods are solved.
+ //classParents = ctx.normalizeToClassRefs(List(newSuperClass), moduleSym, decls1),
+ decls = decls1))
+ case _ =>
+ moduleClassSym
+ }
+ case ref: SymDenotation =>
+ if (isMethodWithExtension(ref) && ref.hasAnnotation(defn.TailrecAnnot)) {
+ val ref1 = ref.copySymDenotation()
+ ref1.removeAnnotation(defn.TailrecAnnot)
+ ref1
+ }
+ else if (ref.isConstructor && isDerivedValueClass(ref.owner) && ref.is(AccessFlags)) {
+ val ref1 = ref.copySymDenotation()
+ ref1.resetFlag(AccessFlags)
+ ref1
+ }
+ else ref
+ case _ =>
+ ref
+ }
+
+ protected def rewiredTarget(target: Symbol, derived: Symbol)(implicit ctx: Context): Symbol =
+ if (isMethodWithExtension(target) &&
+ target.owner.linkedClass == derived.owner) extensionMethod(target)
+ else NoSymbol
+
+ private def createExtensionMethod(imeth: Symbol, staticClass: Symbol)(implicit ctx: Context): TermSymbol = {
+ val extensionName = extensionNames(imeth).head.toTermName
+ val extensionMeth = ctx.newSymbol(staticClass, extensionName,
+ imeth.flags | Final &~ (Override | Protected | AbsOverride),
+ fullyParameterizedType(imeth.info, imeth.owner.asClass),
+ privateWithin = imeth.privateWithin, coord = imeth.coord)
+ extensionMeth.addAnnotations(imeth.annotations)(ctx.withPhase(thisTransformer))
+ // need to change phase to add tailrec annotation which gets removed from original method in the same phase.
+ extensionMeth
+ }
+
+ private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
+ // TODO: this is state and should be per-run
+ // todo: check that when transformation finished map is empty
+
+ private def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: ClassSymbol)(implicit ctx: Context): Unit =
+ if (seen contains clazz)
+ ctx.error("value class may not unbox to itself", pos)
+ else {
+ val unboxed = underlyingOfValueClass(clazz).typeSymbol
+ if (isDerivedValueClass(unboxed)) checkNonCyclic(pos, seen + clazz, unboxed.asClass)
+ }
+
+  /** For templates of static owners (companion objects), splice in all the
+   *  extension methods queued for them in `extensionDefs`. Value-class
+   *  templates themselves are left unchanged (the cyclicity check is disabled,
+   *  see comment below).
+   */
+  override def transformTemplate(tree: tpd.Template)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    if (isDerivedValueClass(ctx.owner)) {
+      /* This is currently redundant since value classes may not
+         wrap over other value classes anyway.
+        checkNonCyclic(ctx.owner.pos, Set(), ctx.owner) */
+      tree
+    } else if (ctx.owner.isStaticOwner) {
+      // Drain the queued extension methods (if any) into the companion's body.
+      extensionDefs remove tree.symbol.owner match {
+        case Some(defns) if defns.nonEmpty =>
+          cpy.Template(tree)(body = tree.body ++
+            defns.map(transformFollowing(_)))
+        case _ =>
+          tree
+      }
+    } else tree
+  }
+
+  /** For each method eligible for an extension method:
+   *  - look up the extension method previously entered in the companion class,
+   *  - queue its fully parameterized definition in `extensionDefs` (spliced into
+   *    the companion's template by `transformTemplate`),
+   *  - replace the original rhs by a forwarder to the extension method.
+   */
+  override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    if (isMethodWithExtension(tree.symbol)) {
+      val origMeth = tree.symbol
+      val origClass = ctx.owner.asClass
+      val staticClass = origClass.linkedClass
+      // NOTE(review): the message interpolates `origClass.owner.info.decls` twice —
+      // looks like a copy/paste slip; confirm intended second operand.
+      assert(staticClass.exists, s"$origClass lacks companion, ${origClass.owner.definedPeriodsString} ${origClass.owner.info.decls} ${origClass.owner.info.decls}")
+      val extensionMeth = extensionMethod(origMeth)
+      ctx.log(s"Value class $origClass spawns extension method.\n  Old: ${origMeth.showDcl}\n  New: ${extensionMeth.showDcl}")
+      // Get (or create) the buffer of extension defs queued for the companion.
+      val store: ListBuffer[Tree] = extensionDefs.get(staticClass) match {
+        case Some(x) => x
+        case None =>
+          val newC = new ListBuffer[Tree]()
+          extensionDefs(staticClass) = newC
+          newC
+      }
+      store += atGroupEnd(fullyParameterizedDef(extensionMeth, tree)(_))
+      cpy.DefDef(tree)(rhs = atGroupEnd(forwarder(extensionMeth, tree)(_)))
+    } else tree
+  }
+}
+
+object ExtensionMethods {
+  /** Generate stream of possible names for the extension version of given instance method `imeth`.
+   *  If the method is not overloaded, this stream consists of just "imeth$extension".
+   *  If the method is overloaded, the stream has as first element "imeth$extensionX", where X is the
+   *  index of imeth in the sequence of overloaded alternatives with the same name. This choice will
+   *  always be picked as the name of the generated extension method.
+   *  After this first choice, all other possible indices in the range of 0 until the number
+   *  of overloaded alternatives are returned. The secondary choices are used to find a matching method
+   *  in `extensionMethod` if the first name has the wrong type. We thereby gain a level of insensitivity
+   *  of how overloaded types are ordered between phases and picklings.
+   */
+  private def extensionNames(imeth: Symbol)(implicit ctx: Context): Stream[Name] = {
+    val decl = imeth.owner.info.decl(imeth.name)
+
+    /** No longer needed for Dotty, as we are more disciplined with scopes now.
+    // Bridge generation is done at phase `erasure`, but new scopes are only generated
+    // for the phase after that. So bridges are visible in earlier phases.
+    //
+    // `info.member(imeth.name)` filters these out, but we need to use `decl`
+    // to restrict ourselves to members defined in the current class, so we
+    // must do the filtering here.
+    val declTypeNoBridge = decl.filter(sym => !sym.isBridge).tpe
+    */
+    decl match {
+      case decl: MultiDenotation =>
+        // Overloaded case: primary name carries imeth's own index, all other
+        // indices follow as fallbacks.
+        val alts = decl.alternatives
+        val index = alts indexOf imeth.denot
+        assert(index >= 0, alts + " does not contain " + imeth)
+        def altName(index: Int) = (imeth.name + "$extension" + index).toTermName
+        altName(index) #:: ((0 until alts.length).toStream filter (index != _) map altName)
+      case decl =>
+        // Not overloaded: single canonical name.
+        assert(decl.exists, imeth.name + " not found in " + imeth.owner + "'s decls: " + imeth.owner.info.decls)
+        Stream((imeth.name + "$extension").toTermName)
+    }
+  }
+
+  /** Return the extension method that corresponds to given instance method `meth`. */
+  def extensionMethod(imeth: Symbol)(implicit ctx: Context): TermSymbol =
+    ctx.atPhase(ctx.extensionMethodsPhase.next) { implicit ctx =>
+      // FIXME use toStatic instead?
+      // Look the method up in the companion module, matching on the normalized
+      // (fully-parameterized) signature so overload reordering between
+      // phases/picklings does not matter.
+      val companionInfo = imeth.owner.companionModule.info
+      val candidates = extensionNames(imeth) map (companionInfo.decl(_).symbol) filter (_.exists)
+      val matching = candidates filter (c => FullParameterization.memberSignature(c.info) == imeth.signature)
+      assert(matching.nonEmpty,
+        i"""no extension method found for:
+           |
+           | $imeth:${imeth.info.show} with signature ${imeth.signature}
+           |
+           | Candidates:
+           |
+           | ${candidates.map(c => c.name + ":" + c.info.show).mkString("\n")}
+           |
+           | Candidates (signatures normalized):
+           |
+           | ${candidates.map(c => c.name + ":" + c.info.signature + ":" + FullParameterization.memberSignature(c.info)).mkString("\n")}
+           |
+           | Eligible Names: ${extensionNames(imeth).mkString(",")}""")
+      matching.head.asTerm
+    }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
new file mode 100644
index 000000000..597146514
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
@@ -0,0 +1,193 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Names._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Phases.NeedsCompanions
+import dotty.tools.dotc.transform.TreeTransforms._
+import ast.Trees._
+import Flags._
+import Types._
+import Constants.Constant
+import Contexts.Context
+import Symbols._
+import SymDenotations._
+import Decorators._
+import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
+import dotty.tools.dotc.core.Denotations.SingleDenotation
+import scala.collection.mutable
+import DenotTransformers._
+import typer.Checking
+import Names.Name
+import NameOps._
+import StdNames._
+
+
+/** The first tree transform
+ *  - ensures there are companion objects for all classes except module classes
+ *  - eliminates some kinds of trees: Imports, NamedArgs
+ *  - stubs out native methods
+ *  - eliminates self tree in Template and self symbol in ClassInfo
+ *  - collapses all type trees to trees of class TypeTree
+ *  - converts idempotent expressions with constant types
+ */
+class FirstTransform extends MiniPhaseTransform with InfoTransformer with AnnotationTransformer { thisTransformer =>
+  import ast.tpd._
+
+  override def phaseName = "firstTransform"
+
+  // Later phases that may require synthesized companion objects.
+  // Initialized per compilation unit in prepareForUnit.
+  private var addCompanionPhases: List[NeedsCompanions] = _
+
+  /** Does some later phase in the plan require a companion object for `cls`? */
+  def needsCompanion(cls: ClassSymbol)(implicit ctx: Context) =
+    addCompanionPhases.exists(_.isCompanionNeeded(cls))
+
+  override def prepareForUnit(tree: tpd.Tree)(implicit ctx: Context): TreeTransform = {
+    // Collect all NeedsCompanions phases from the phase plan.
+    addCompanionPhases = ctx.phasePlan.flatMap(_ collect { case p: NeedsCompanions => p })
+    this
+  }
+
+  /** eliminate self symbol in ClassInfo */
+  override def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = tp match {
+    case tp @ ClassInfo(_, _, _, _, self: Symbol) =>
+      tp.derivedClassInfo(selfInfo = self.info)
+    case _ =>
+      tp
+  }
+
+  /*
+  tp match {
+      //create companions for value classes that are not from currently compiled source file
+      case tp@ClassInfo(_, cls, _, decls, _)
+        if (ValueClasses.isDerivedValueClass(cls)) &&
+          !sym.isDefinedInCurrentRun && sym.scalacLinkedClass == NoSymbol =>
+        val newDecls = decls.cloneScope
+        val (modul, mcMethod, symMethod) = newCompanion(sym.name.toTermName, sym)
+        modul.entered
+        mcMethod.entered
+        newDecls.enter(symMethod)
+        tp.derivedClassInfo(decls = newDecls)
+      case _ => tp
+    }
+  }
+  */
+
+  /** Post-conditions of this phase: no Imports, NamedArgs or non-TypeTree type
+   *  trees remain, and every symbolic selection has a qualifier whose type
+   *  derives from the selected symbol's owner.
+   */
+  override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
+    tree match {
+      case Select(qual, name) if !name.isOuterSelect && tree.symbol.exists =>
+        assert(qual.tpe derivesFrom tree.symbol.owner, i"non member selection of ${tree.symbol.showLocated} from ${qual.tpe} in $tree")
+      case _: TypeTree =>
+      case _: Import | _: NamedArg | _: TypTree =>
+        assert(false, i"illegal tree: $tree")
+      case _ =>
+    }
+  }
+
+  /** Reorder statements so that module classes always come after their companion classes, add missing companion classes */
+  private def reorderAndComplete(stats: List[Tree])(implicit ctx: Context): List[Tree] = {
+    // moduleClassDefs: module classes seen but not yet placed after a companion.
+    // singleClassDefs: plain classes with (so far) no module class in the list.
+    val moduleClassDefs, singleClassDefs = mutable.Map[Name, Tree]()
+
+    def reorder(stats: List[Tree]): List[Tree] = stats match {
+      case (stat: TypeDef) :: stats1 if stat.symbol.isClass =>
+        if (stat.symbol is Flags.Module) {
+          // Remember the module class; keep it here only if no companion class
+          // later claims it (in which case it is removed from the map).
+          moduleClassDefs += (stat.name -> stat)
+          singleClassDefs -= stat.name.stripModuleClassSuffix
+          val stats1r = reorder(stats1)
+          if (moduleClassDefs contains stat.name) stat :: stats1r else stats1r
+        } else {
+          def stats1r = reorder(stats1)
+          val normalized = moduleClassDefs remove stat.name.moduleClassName match {
+            case Some(mcdef) =>
+              // Place the previously-seen module class right after its companion.
+              mcdef :: stats1r
+            case None =>
+              singleClassDefs += (stat.name -> stat)
+              stats1r
+          }
+          stat :: normalized
+        }
+      case stat :: stats1 => stat :: reorder(stats1)
+      case Nil => Nil
+    }
+
+    /** Create and enter a synthetic companion module for `forClass`. */
+    def registerCompanion(name: TermName, forClass: Symbol): TermSymbol = {
+      val (modul, mcCompanion, classCompanion) = newCompanion(name, forClass)
+      if (ctx.owner.isClass) modul.enteredAfter(thisTransformer)
+      mcCompanion.enteredAfter(thisTransformer)
+      classCompanion.enteredAfter(thisTransformer)
+      modul
+    }
+
+    /** For classes without a companion that need one, synthesize the module;
+     *  avoid clashing with an existing parameterless member of the same name.
+     */
+    def addMissingCompanions(stats: List[Tree]): List[Tree] = stats map {
+      case stat: TypeDef if (singleClassDefs contains stat.name) && needsCompanion(stat.symbol.asClass) =>
+        val objName = stat.name.toTermName
+        val nameClash = stats.exists {
+          case other: MemberDef =>
+            other.name == objName && other.symbol.info.isParameterless
+          case _ =>
+            false
+        }
+        val uniqueName = if (nameClash) objName.avoidClashName else objName
+        Thicket(stat :: ModuleDef(registerCompanion(uniqueName, stat.symbol), Nil).trees)
+      case stat => stat
+    }
+
+    addMissingCompanions(reorder(stats))
+  }
+
+  /** Create a synthetic companion module symbol for `forClass`, together with
+   *  the two synthetic companion-link methods connecting class and module class.
+   */
+  private def newCompanion(name: TermName, forClass: Symbol)(implicit ctx: Context) = {
+    val modul = ctx.newCompleteModuleSymbol(forClass.owner, name, Synthetic, Synthetic,
+      defn.ObjectType :: Nil, Scopes.newScope, assocFile = forClass.asClass.assocFile)
+    val mc = modul.moduleClass
+
+    val mcComp = ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, forClass, mc)
+    val classComp = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, mc, forClass)
+    (modul, mcComp, classComp)
+  }
+
+  /** eliminate self in Template */
+  override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    cpy.Template(impl)(self = EmptyValDef)
+  }
+
+  /** Replace the body of a @native method by a stub that throws via sys.error. */
+  override def transformDefDef(ddef: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
+    if (ddef.symbol.hasAnnotation(defn.NativeAnnot)) {
+      ddef.symbol.resetFlag(Deferred)
+      DefDef(ddef.symbol.asTerm,
+        _ => ref(defn.Sys_errorR).withPos(ddef.pos)
+          .appliedTo(Literal(Constant("native method stub"))))
+    } else ddef
+  }
+
+  override def transformStats(trees: List[Tree])(implicit ctx: Context, info: TransformerInfo): List[Tree] =
+    ast.Trees.flatten(reorderAndComplete(trees)(ctx.withPhase(thisTransformer.next)))
+
+  /** Eliminate Imports, inline NamedArgs, collapse remaining type trees. */
+  override def transformOther(tree: Tree)(implicit ctx: Context, info: TransformerInfo) = tree match {
+    case tree: Import => EmptyTree
+    case tree: NamedArg => transform(tree.arg)
+    case tree => if (tree.isType) TypeTree(tree.tpe).withPos(tree.pos) else tree
+  }
+
+  override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) =
+    if (tree.isType) TypeTree(tree.tpe).withPos(tree.pos)
+    else constToLiteral(tree)
+
+  override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) =
+    if (tree.isType) TypeTree(tree.tpe).withPos(tree.pos)
+    else constToLiteral(tree)
+
+  override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo) =
+    constToLiteral(tree)
+
+  override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) =
+    constToLiteral(tree)
+
+  override def transformTyped(tree: Typed)(implicit ctx: Context, info: TransformerInfo) =
+    constToLiteral(tree)
+
+  override def transformBlock(tree: Block)(implicit ctx: Context, info: TransformerInfo) =
+    constToLiteral(tree)
+
+  // invariants: all modules have companion objects
+  //             all types are TypeTrees
+  //             all this types are explicit
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala
new file mode 100644
index 000000000..f0104e715
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala
@@ -0,0 +1,47 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers.SymTransformer
+import Phases.Phase
+import Contexts.Context
+import Flags._
+import SymDenotations.SymDenotation
+import collection.mutable
+import TreeTransforms.MiniPhaseTransform
+import dotty.tools.dotc.transform.TreeTransforms.TransformerInfo
+
+/** Lift nested classes to toplevel */
+class Flatten extends MiniPhaseTransform with SymTransformer { thisTransform =>
+  import ast.tpd._
+  override def phaseName = "flatten"
+
+  /** A non-toplevel class gets a flattened name and is re-owned by its
+   *  enclosing package class; everything else is left unchanged.
+   */
+  def transformSym(ref: SymDenotation)(implicit ctx: Context) = {
+    if (ref.isClass && !ref.is(Package) && !ref.owner.is(Package)) {
+      ref.copySymDenotation(
+        name = ref.flatName,
+        owner = ref.enclosingPackageClass)
+    }
+    else ref
+  }
+
+  // Class definitions lifted out of their enclosing scope; flushed into the
+  // next package-level statement list by transformStats.
+  // NOTE(review): per-phase-instance mutable state — presumably fine because
+  // trees of a unit are transformed sequentially; confirm (cf. the per-run
+  // TODO on similar state in other phases).
+  private val liftedDefs = new mutable.ListBuffer[Tree]
+
+  /** If `tree` is nested (owner is not a package), move it (and all members of
+   *  a possible Thicket) into `liftedDefs` and leave EmptyTree in its place.
+   */
+  private def liftIfNested(tree: Tree)(implicit ctx: Context, info: TransformerInfo) =
+    if (ctx.owner is Package) tree
+    else {
+      transformFollowing(tree).foreachInThicket(liftedDefs += _)
+      EmptyTree
+    }
+
+  /** At package level, append all lifted classes and reset the buffer. */
+  override def transformStats(stats: List[Tree])(implicit ctx: Context, info: TransformerInfo) =
+    if (ctx.owner is Package) {
+      val liftedStats = stats ++ liftedDefs
+      liftedDefs.clear
+      liftedStats
+    }
+    else stats
+
+  override def transformTypeDef(tree: TypeDef)(implicit ctx: Context, info: TransformerInfo) =
+    liftIfNested(tree)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
new file mode 100644
index 000000000..6c69c735b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
@@ -0,0 +1,263 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Types._
+import Contexts._
+import Symbols._
+import Decorators._
+import TypeUtils._
+import StdNames.nme
+import NameOps._
+import ast._
+import ast.Trees._
+
+import scala.reflect.internal.util.Collections
+
+/** Provides methods to produce fully parameterized versions of instance methods,
+ *  where the `this` of the enclosing class is abstracted out in an extra leading
+ *  `$this` parameter and type parameters of the class become additional type
+ *  parameters of the fully parameterized method.
+ *
+ *  Example usage scenarios are:
+ *
+ *  - extension methods of value classes
+ *  - implementations of trait methods
+ *  - static protected accessors
+ *  - local methods produced by tailrec transform
+ *
+ *  Note that the methods lift out type parameters of the class containing
+ *  the instance method, but not type parameters of enclosing classes. The
+ *  fully instantiated method therefore needs to be put in a scope "close"
+ *  to the original method, i.e. they need to share the same outer pointer.
+ *  Examples of legal positions are: in the companion object, or as a local
+ *  method inside the original method.
+ *
+ *  Note: The scheme does not handle yet methods where type parameter bounds
+ *  depend on value parameters of the enclosing class, as in:
+ *
+ *  class C(val a: String) extends AnyVal {
+ *    def foo[U <: a.type]: Unit = ...
+ *  }
+ *
+ *  The expansion of method `foo` would lead to
+ *
+ *  def foo$extension[U <: $this.a.type]($this: C): Unit = ...
+ *
+ *  which is not typable. Not clear yet what to do. Maybe allow PolyTypes
+ *  to follow method parameters and translate to the following:
+ *
+ *  def foo$extension($this: C)[U <: $this.a.type]: Unit = ...
+ *
+ *  @see class-dependent-extension-method.scala in pending/pos.
+ */
+trait FullParameterization {
+
+  import tpd._
+  import FullParameterization._
+
+  /** If references to original symbol `referenced` from within fully parameterized method
+   *  `derived` should be rewired to some fully parameterized method, the rewiring target symbol,
+   *  otherwise NoSymbol.
+   */
+  protected def rewiredTarget(referenced: Symbol, derived: Symbol)(implicit ctx: Context): Symbol
+
+  /** If references to some original symbol from given tree node within fully parameterized method
+   *  `derived` should be rewired to some fully parameterized method, the rewiring target symbol,
+   *  otherwise NoSymbol. By default implemented as
+   *
+   *      rewiredTarget(tree.symbol, derived)
+   *
+   *  but can be overridden.
+   */
+  protected def rewiredTarget(tree: Tree, derived: Symbol)(implicit ctx: Context): Symbol =
+    rewiredTarget(tree.symbol, derived)
+
+  /** Converts the type `info` of a member of class `clazz` to a method type that
+   *  takes the `this` of the class and any type parameters of the class
+   *  as additional parameters. Example:
+   *
+   *    class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+   *      def baz[B >: A](x: B): List[B] = ...
+   *    }
+   *
+   *  leads to:
+   *
+   *    object Foo {
+   *      def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+   *    }
+   *
+   *  If a self type is present, $this has this self type as its type.
+   *
+   *  @param abstractOverClass if true, include the type parameters of the class in the method's list of type parameters.
+   *  @param liftThisType if true, require created $this to be $this: (Foo[A] & Foo,this).
+   *                      This is needed if created member stays inside scope of Foo(as in tailrec)
+   */
+  def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(implicit ctx: Context): Type = {
+    // Split `info` into its own type-parameter count and its (possibly method) result.
+    val (mtparamCount, origResult) = info match {
+      case info: PolyType => (info.paramNames.length, info.resultType)
+      case info: ExprType => (0, info.resultType)
+      case _ => (0, info)
+    }
+    val ctparams = if (abstractOverClass) clazz.typeParams else Nil
+    val ctnames = ctparams.map(_.name.unexpandedName)
+
+    /** The method result type */
+    def resultType(mapClassParams: Type => Type) = {
+      val thisParamType = mapClassParams(clazz.classInfo.selfType)
+      val firstArgType = if (liftThisType) thisParamType & clazz.thisType else thisParamType
+      MethodType(nme.SELF :: Nil, firstArgType :: Nil)(mt =>
+        mapClassParams(origResult).substThisUnlessStatic(clazz, MethodParam(mt, 0)))
+    }
+
+    /** Replace class type parameters by the added type parameters of the polytype `pt` */
+    def mapClassParams(tp: Type, pt: PolyType): Type = {
+      // Class params come after the method's own params in the new polytype.
+      val classParamsRange = (mtparamCount until mtparamCount + ctparams.length).toList
+      tp.substDealias(ctparams, classParamsRange map (PolyParam(pt, _)))
+    }
+
+    /** The bounds for the added type parameters of the polytype `pt` */
+    def mappedClassBounds(pt: PolyType): List[TypeBounds] =
+      ctparams.map(tparam => mapClassParams(tparam.info, pt).bounds)
+
+    info match {
+      case info: PolyType =>
+        // Concatenate the method's own type params with the class's.
+        PolyType(info.paramNames ++ ctnames)(
+          pt =>
+            (info.paramBounds.map(mapClassParams(_, pt).bounds) ++
+              mappedClassBounds(pt)).mapConserve(_.subst(info, pt).bounds),
+          pt => resultType(mapClassParams(_, pt)).subst(info, pt))
+      case _ =>
+        if (ctparams.isEmpty) resultType(identity)
+        else PolyType(ctnames)(mappedClassBounds, pt => resultType(mapClassParams(_, pt)))
+    }
+  }
+
+  /** The type parameters (skolems) of the method definition `originalDef`,
+   *  followed by the class parameters of its enclosing class.
+   */
+  private def allInstanceTypeParams(originalDef: DefDef, abstractOverClass: Boolean)(implicit ctx: Context): List[Symbol] =
+    if (abstractOverClass)
+      originalDef.tparams.map(_.symbol) ::: originalDef.symbol.enclosingClass.typeParams
+    else originalDef.tparams.map(_.symbol)
+
+  /** Given an instance method definition `originalDef`, return a
+   *  fully parameterized method definition derived from `originalDef`, which
+   *  has `derived` as symbol and `fullyParameterizedType(originalDef.symbol.info)`
+   *  as info.
+   *  `abstractOverClass` defines whether the DefDef should abstract over type parameters
+   *  of class that contained original defDef
+   */
+  def fullyParameterizedDef(derived: TermSymbol, originalDef: DefDef, abstractOverClass: Boolean = true)(implicit ctx: Context): Tree =
+    polyDefDef(derived, trefs => vrefss => {
+      val origMeth = originalDef.symbol
+      val origClass = origMeth.enclosingClass.asClass
+      val origTParams = allInstanceTypeParams(originalDef, abstractOverClass)
+      val origVParams = originalDef.vparamss.flatten map (_.symbol)
+      // First value parameter of the derived method is the lifted `$this`.
+      val thisRef :: argRefs = vrefss.flatten
+
+      /** If tree should be rewired, the rewired tree, otherwise EmptyTree.
+       *  @param targs  Any type arguments passed to the rewired tree.
+       */
+      def rewireTree(tree: Tree, targs: List[Tree])(implicit ctx: Context): Tree = {
+        def rewireCall(thisArg: Tree): Tree = {
+          val rewired = rewiredTarget(tree, derived)
+          if (rewired.exists) {
+            // Re-derive the class type arguments from the receiver's base type.
+            val base = thisArg.tpe.baseTypeWithArgs(origClass)
+            assert(base.exists)
+            ref(rewired.termRef)
+              .appliedToTypeTrees(targs ++ base.argInfos.map(TypeTree(_)))
+              .appliedTo(thisArg)
+          } else EmptyTree
+        }
+        tree match {
+          case Return(expr, from) if !from.isEmpty =>
+            val rewired = rewiredTarget(from, derived)
+            if (rewired.exists)
+              tpd.cpy.Return(tree)(expr, Ident(rewired.termRef))
+            else
+              EmptyTree
+          case Ident(_) => rewireCall(thisRef)
+          case Select(qual, _) => rewireCall(qual)
+          case tree @ TypeApply(fn, targs1) =>
+            assert(targs.isEmpty)
+            rewireTree(fn, targs1)
+          case _ => EmptyTree
+        }
+      }
+
+      /** Type rewiring is needed because a previous reference to an instance
+       *  method might still persist in the types of enclosing nodes. Example:
+       *
+       *    if (true) this.imeth else this.imeth
+       *
+       *  is rewritten to
+       *
+       *    if (true) xmeth($this) else xmeth($this)
+       *
+       *  but the type `this.imeth` still persists as the result type of the `if`,
+       *  because it is kept by the `cpy` operation of the tree transformer.
+       *  It needs to be rewritten to the common result type of `imeth` and `xmeth`.
+       */
+      def rewireType(tpe: Type) = tpe match {
+        case tpe: TermRef if rewiredTarget(tpe.symbol, derived).exists => tpe.widen
+        case _ => tpe
+      }
+
+      new TreeTypeMap(
+        typeMap = rewireType(_)
+          .substDealias(origTParams, trefs)
+          .subst(origVParams, argRefs.map(_.tpe))
+          .substThisUnlessStatic(origClass, thisRef.tpe),
+        treeMap = {
+          case tree: This if tree.symbol == origClass => thisRef
+          case tree => rewireTree(tree, Nil) orElse tree
+        },
+        oldOwners = origMeth :: Nil,
+        newOwners = derived :: Nil
+      ).transform(originalDef.rhs)
+    })
+
+  /** A forwarder expression which calls `derived`, passing along
+   *  - if `abstractOverClass` the type parameters and enclosing class parameters of originalDef`,
+   *  - the `this` of the enclosing class,
+   *  - the value parameters of the original method `originalDef`.
+   */
+  def forwarder(derived: TermSymbol, originalDef: DefDef, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(implicit ctx: Context): Tree = {
+    val fun =
+      ref(derived.termRef)
+        .appliedToTypes(allInstanceTypeParams(originalDef, abstractOverClass).map(_.typeRef))
+        .appliedTo(This(originalDef.symbol.enclosingClass.asClass))
+
+    (if (!liftThisType)
+      fun.appliedToArgss(originalDef.vparamss.nestedMap(vparam => ref(vparam.symbol)))
+    else {
+      // this type could have changed on forwarding. Need to insert a cast.
+      val args = Collections.map2(originalDef.vparamss, fun.tpe.paramTypess)((vparams, paramTypes) =>
+        Collections.map2(vparams, paramTypes)((vparam, paramType) => {
+          assert(vparam.tpe <:< paramType.widen) // type should still conform to widened type
+          ref(vparam.symbol).ensureConforms(paramType)
+        })
+      )
+      fun.appliedToArgss(args)
+
+    }).withPos(originalDef.rhs.pos)
+  }
+}
+
+object FullParameterization {
+
+  /** Assuming `info` is a result of a `fullyParameterizedType` call, the signature of the
+   *  original method type `X` such that `info = fullyParameterizedType(X, ...)`.
+   */
+  def memberSignature(info: Type)(implicit ctx: Context): Signature = info match {
+    case info: PolyType =>
+      // Strip the (method + class) type parameters.
+      memberSignature(info.resultType)
+    case info @ MethodType(nme.SELF :: Nil, _) =>
+      // Only the lifted `$this` parameter: original was parameterless.
+      info.resultType.ensureMethodic.signature
+    case info @ MethodType(nme.SELF :: otherNames, thisType :: otherTypes) =>
+      // Drop the leading `$this` parameter, keep the rest.
+      info.derivedMethodType(otherNames, otherTypes, info.resultType).signature
+    case _ =>
+      Signature.NotAMethod
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala b/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala
new file mode 100644
index 000000000..5fd89314a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/FunctionalInterfaces.scala
@@ -0,0 +1,83 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import core.SymDenotations._
+import core.StdNames.nme
+import core.Names._
+import core.NameOps._
+import ast.Trees._
+import SymUtils._
+import dotty.tools.dotc.ast.tpd
+import collection.{ mutable, immutable }
+import collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet }
+
+/**
+ * Rewires closures to implement more specific types of Functions.
+ */
+class FunctionalInterfaces extends MiniPhaseTransform {
+  import tpd._
+
+  def phaseName: String = "functionalInterfaces"
+
+  private var allowedReturnTypes: Set[Symbol] = _ // moved here to make it explicit what specializations are generated
+  private var allowedArgumentTypes: Set[Symbol] = _
+  // Specialized JFunction interfaces exist only up to arity 2.
+  val maxArgsCount = 2
+
+  /** Can a closure of method type `m` be rewired to a specialized interface?
+   *  Requires arity <= maxArgsCount and all argument/return types in the
+   *  allowed specialization sets.
+   */
+  def shouldSpecialize(m: MethodType)(implicit ctx: Context) =
+    (m.paramTypes.size <= maxArgsCount) &&
+      m.paramTypes.forall(x => allowedArgumentTypes.contains(x.typeSymbol)) &&
+      allowedReturnTypes.contains(m.resultType.typeSymbol)
+
+  val functionName = "JFunction".toTermName
+  val functionPackage = "scala.compat.java8.".toTermName
+
+  override def prepareForUnit(tree: tpd.Tree)(implicit ctx: Context): TreeTransform = {
+    allowedReturnTypes = Set(defn.UnitClass,
+      defn.BooleanClass,
+      defn.IntClass,
+      defn.FloatClass,
+      defn.LongClass,
+      defn.DoubleClass,
+      /* only for Function0: */ defn.ByteClass,
+      defn.ShortClass,
+      defn.CharClass)
+
+    allowedArgumentTypes = Set(defn.IntClass,
+      defn.LongClass,
+      defn.DoubleClass,
+      /* only for Function1: */ defn.FloatClass)
+
+    this
+  }
+
+  /** If the closure has no explicit target type and its method type qualifies,
+   *  retarget it to the specialized `scala.compat.java8.JFunctionN...` interface
+   *  when that interface exists on the classpath; otherwise leave it unchanged.
+   */
+  override def transformClosure(tree: Closure)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    tree.tpt match {
+      case EmptyTree =>
+        val m = tree.meth.tpe.widen.asInstanceOf[MethodType]
+
+        if (shouldSpecialize(m)) {
+          val functionSymbol = tree.tpe.widenDealias.classSymbol
+          // Type-parameter names are read at erasure, where they are still available.
+          val names = ctx.atPhase(ctx.erasurePhase) {
+            implicit ctx => functionSymbol.typeParams.map(_.name)
+          }
+          val interfaceName = (functionName ++ m.paramTypes.length.toString).specializedFor(m.paramTypes ::: m.resultType :: Nil, names, Nil, Nil)
+
+          // symbols loaded from classpath aren't defined in periods earlier than when they were loaded
+          val interface = ctx.withPhase(ctx.typerPhase).getClassIfDefined(functionPackage ++ interfaceName)
+          if (interface.exists) {
+            val tpt = tpd.TypeTree(interface.asType.typeRef)
+            tpd.Closure(tree.env, tree.meth, tpt)
+          } else tree
+        } else tree
+      case _ =>
+        tree
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/GetClass.scala b/compiler/src/dotty/tools/dotc/transform/GetClass.scala
new file mode 100644
index 000000000..6a9a5fda2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/GetClass.scala
@@ -0,0 +1,34 @@
+package dotty.tools.dotc
+package transform
+
+import ast.tpd
+import core.Contexts.Context
+import core.StdNames.nme
+import core.Phases.Phase
+import TypeUtils._
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Rewrite `getClass` calls as follow:
+ *
+ *  For every instance of primitive class C whose boxed class is called B:
+ *    instanceC.getClass -> B.TYPE
+ *  For every instance of non-primitive class D:
+ *    instanceD.getClass -> instanceD.getClass
+ */
+class GetClass extends MiniPhaseTransform {
+  import tpd._
+
+  override def phaseName: String = "getClass"
+
+  // getClass transformation should be applied to specialized methods
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure], classOf[FunctionalInterfaces])
+
+  override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    import ast.Trees._
+    tree match {
+      // Primitive receiver: replace the call by the class constant of its type.
+      case Apply(Select(qual, nme.getClass_), Nil) if qual.tpe.widen.isPrimitiveValueType =>
+        clsOf(qual.tpe.widen).withPos(tree.pos)
+      case _ => tree
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala
new file mode 100644
index 000000000..31171dfab
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala
@@ -0,0 +1,76 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers.SymTransformer
+import Contexts.Context
+import SymDenotations.SymDenotation
+import Types._
+import Symbols._
+import SymUtils._
+import Constants._
+import TreeTransforms._
+import Flags._
+import Decorators._
+import ValueClasses._
+
+/** Performs the following rewritings for fields of a class:
+ *
+ *    <mods> val x: T = e
+ *      -->  <mods> <stable> <accessor> def x: T = e
+ *    <mods> var x: T = e
+ *      -->  <mods> <accessor> def x: T = e
+ *
+ *    <mods> val x: T
+ *      -->  <mods> <stable> <accessor> def x: T
+ *
+ *    <mods> lazy val x: T = e
+ *      -->  <mods> <accessor> lazy def x: T =e
+ *
+ *    <mods> var x: T
+ *      -->  <mods> <accessor> def x: T
+ *
+ *    <mods> non-static <module> val x$ = e
+ *      -->  <mods> <module> <accessor> def x$ = e
+ *
+ *  Omitted from the rewritings are
+ *
+ *   - private[this] fields in classes (excluding traits, value classes)
+ *   - fields generated for static modules (TODO: needed?)
+ *   - parameters, static fields, and fields coming from Java
+ *
+ *  Furthermore, assignments to mutable vars are replaced by setter calls
+ *
+ *     p.x = e
+ *      -->  p.x_=(e)
+ *
+ *  No fields are generated yet. This is done later in phase Memoize.
+ */
+class Getters extends MiniPhaseTransform with SymTransformer { thisTransform =>
+  import ast.tpd._
+
+  override def phaseName = "getters"
+
+  /** Turn an eligible field denotation into an accessor (ExprType info);
+   *  stable fields additionally get the Stable flag.
+   */
+  override def transformSym(d: SymDenotation)(implicit ctx: Context): SymDenotation = {
+    // Exclusions listed in the class comment: methods/params/Java members,
+    // private[this] fields outside traits/value classes (unless lazy),
+    // static modules, @static fields, and self symbols.
+    def noGetterNeeded =
+      d.is(NoGetterNeeded) ||
+        d.initial.asInstanceOf[SymDenotation].is(PrivateLocal) && !d.owner.is(Trait) && !isDerivedValueClass(d.owner) && !d.is(Flags.Lazy) ||
+        d.is(Module) && d.isStatic ||
+        d.hasAnnotation(defn.ScalaStaticAnnot) ||
+        d.isSelfSym
+    if (d.isTerm && (d.is(Lazy) || d.owner.isClass) && d.info.isValueType && !noGetterNeeded) {
+      val maybeStable = if (d.isStable) Stable else EmptyFlags
+      d.copySymDenotation(
+        initFlags = d.flags | maybeStable | AccessorCreationFlags,
+        info = ExprType(d.info))
+    }
+    else d
+  }
+  private val NoGetterNeeded = Method | Param | JavaDefined | JavaStatic
+
+  /** Rewrite the ValDef of a symbol turned into an accessor as a DefDef. */
+  override def transformValDef(tree: ValDef)(implicit ctx: Context, info: TransformerInfo): Tree =
+    if (tree.symbol is Method) DefDef(tree.symbol.asTerm, tree.rhs).withPos(tree.pos) else tree
+
+  /** Rewrite assignments to accessor-backed vars as setter calls. */
+  override def transformAssign(tree: Assign)(implicit ctx: Context, info: TransformerInfo): Tree =
+    if (tree.lhs.symbol is Method) tree.lhs.becomes(tree.rhs).withPos(tree.pos) else tree
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
new file mode 100644
index 000000000..7c60e8d72
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
@@ -0,0 +1,131 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.Denotations._
+import core.SymDenotations._
+import core.Contexts._
+import core.Types._
+import ast.Trees._
+import ast.tpd.{Apply, Tree, cpy}
+import dotty.tools.dotc.ast.tpd
+import scala.collection.mutable
+import dotty.tools.dotc._
+import core._
+import Contexts._
+import Symbols._
+import Decorators._
+import NameOps._
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransformer, TreeTransform}
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.{untpd, tpd}
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Types.MethodType
+import dotty.tools.dotc.core.Names.Name
+import scala.collection.mutable.ListBuffer
+import dotty.tools.dotc.core.Denotations.SingleDenotation
+import dotty.tools.dotc.core.SymDenotations.SymDenotation
+import StdNames._
+import Phases.Phase
+
+/** Replace member references as follows:
+ *
+ *  - `x != y` for != in class Any becomes `!(x == y)` with == in class Any.
+ *  - `x.##` for ## in NullClass becomes `0`
+ *  - `x.##` for ## in Any becomes calls to ScalaRunTime.hash,
+ *    using the most precise overload available
+ *  - `x.getClass` for getClass in primitives becomes `x.getClass` with getClass in class Object.
+ */
+class InterceptedMethods extends MiniPhaseTransform {
+  thisTransform =>
+
+  import tpd._
+
+  override def phaseName: String = "intercepted"
+
+  // The `getClass` members of the primitive value classes; filled in prepareForUnit.
+  private var primitiveGetClassMethods: Set[Symbol] = _
+
+  var Any_## : Symbol = _ // cached for performance reason
+
+  /** perform context-dependent initialization */
+  override def prepareForUnit(tree: Tree)(implicit ctx: Context) = {
+    this.Any_## = defn.Any_##
+    primitiveGetClassMethods = Set[Symbol]() ++ defn.ScalaValueClasses().map(x => x.requiredMethod(nme.getClass_))
+    this
+  }
+
+  // this should be removed if we have guarantee that ## will get Apply node
+  override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    if (tree.symbol.isTerm && (Any_## eq tree.symbol.asTerm)) {
+      val rewrite = poundPoundValue(tree.qualifier)
+      ctx.log(s"$phaseName rewrote $tree to $rewrite")
+      rewrite
+    }
+    else tree
+  }
+
+  /** The implementation of `tree.##`: the literal `0` if `tree`'s widened type
+   *  is `Null`, otherwise a call to the best-matching `ScalaRunTime.hash` overload.
+   */
+  private def poundPoundValue(tree: Tree)(implicit ctx: Context) = {
+    val s = tree.tpe.widen.typeSymbol
+    if (s == defn.NullClass) Literal(Constant(0))
+    else {
+      // Since we are past typer, we need to avoid creating trees carrying
+      // overloaded types. This logic is custom (and technically incomplete,
+      // although serviceable) for def hash. What is really needed is for
+      // the overloading logic presently hidden away in a few different
+      // places to be properly exposed so we can just call "resolveOverload"
+      // after typer. Until then:
+
+      def alts = defn.ScalaRuntimeModule.info.member(nme.hash_)
+
+      // if tpe is a primitive value type, alt1 will match on the exact value,
+      // taking in account that null.asInstanceOf[Int] == 0
+      def alt1 = alts.suchThat(_.info.firstParamTypes.head =:= tree.tpe.widen)
+
+      // otherwise alt2 will match. alt2 also knows how to handle 'null' runtime value
+      def alt2 = defn.ScalaRuntimeModule.info.member(nme.hash_)
+        .suchThat(_.info.firstParamTypes.head.typeSymbol == defn.AnyClass)
+
+      Ident((if (s.isNumericValueClass) alt1 else alt2).termRef)
+        .appliedTo(tree)
+    }
+  }
+
+  override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    // NOTE(review): `unknown` is currently unreferenced; kept as a diagnostic helper.
+    def unknown = {
+      assert(false, s"The symbol '${tree.fun.symbol.showLocated}' was intercepted but didn't match any cases, " +
+        s"that means the intercepted methods set doesn't match the code")
+      tree
+    }
+    // Lazily deconstructed: only forced on branches where `tree.fun` is a Select.
+    lazy val Select(qual, _) = tree.fun
+    val Any_## = this.Any_##
+    val Any_!= = defn.Any_!=
+    val rewrite: Tree = tree.fun.symbol match {
+      case Any_## =>
+        poundPoundValue(qual)
+      case Any_!= =>
+        qual.select(defn.Any_==).appliedToArgs(tree.args).select(defn.Boolean_!)
+      /*
+      /* else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
+        // todo: this is needed to support value classes
+        // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
+        global.typer.typed(gen.mkRuntimeCall(nme.anyValClass,
+          List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
+      }*/
+      */
+      case t if primitiveGetClassMethods.contains(t) =>
+        // if we got here then we're trying to send a primitive getClass method to either
+        // a) an Any, in which case Object_getClass works because Any erases to object. Or
+        //
+        // b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
+        //    of the refinement is a primitive and another is AnyRef. In that case
+        //    we get a primitive form of _getClass trying to target a boxed value
+        //    so we need replace that method name with Object_getClass to get correct behavior.
+        //    See SI-5568.
+        qual.selectWithSig(defn.Any_getClass).appliedToNone
+      case _ =>
+        tree
+    }
+    ctx.log(s"$phaseName rewrote $tree to $rewrite")
+    rewrite
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala
new file mode 100644
index 000000000..8bc4a2aa9
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala
@@ -0,0 +1,168 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.util.Positions._
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+import core._
+import Contexts.Context, Types._, Constants._, Decorators._, Symbols._
+import TypeUtils._, TypeErasure._, Flags._
+
+
+/** Implements partial evaluation of `sc.isInstanceOf[Sel]` according to:
+ *
+ *  +-------------+----------------------------+----------------------------+------------------+
+ *  | Sel\sc      | trait                      | class                      | final class      |
+ *  +-------------+----------------------------+----------------------------+------------------+
+ *  | trait       | ?                          | ?                          | statically known |
+ *  | class       | ?                          | false if classes unrelated | statically known |
+ *  | final class | false if classes unrelated | false if classes unrelated | statically known |
+ *  +-------------+----------------------------+----------------------------+------------------+
+ *
+ *  This is a generalized solution to raising an error on unreachable match
+ *  cases and warnings on other statically known results of `isInstanceOf`.
+ *
+ *  Steps taken:
+ *
+ *  1. evalTypeApply will establish the matrix and choose the appropriate
+ *     handling for the case:
+ *  2. a) Sel/sc is a value class or scrutinee is `Any`
+ *     b) handleStaticallyKnown
+ *     c) falseIfUnrelated with `scrutinee <:< selector`
+ *     d) handleFalseUnrelated
+ *     e) leave as is (aka `happens`)
+ *  3. Rewrite according to step taken in `2`
+ */
+class IsInstanceOfEvaluator extends MiniPhaseTransform { thisTransformer =>
+
+  import dotty.tools.dotc.ast.tpd._
+
+  def phaseName = "isInstanceOfEvaluator"
+  override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    val defn = ctx.definitions
+
+    /** Handles the four cases of statically known `isInstanceOf`s and gives
+     *  the correct warnings, or an error if statically known to be false in
+     *  match
+     */
+    def handleStaticallyKnown(select: Select, scrutinee: Type, selector: Type, inMatch: Boolean, pos: Position): Tree = {
+      val scrutineeSubSelector = scrutinee <:< selector
+      if (!scrutineeSubSelector && inMatch) {
+        ctx.error(
+          s"this case is unreachable due to `${selector.show}` not being a subclass of `${scrutinee.show}`",
+          // NOTE(review): the -5 offset presumably re-anchors the error on the
+          // pattern rather than the synthesized isInstanceOf — confirm.
+          Position(pos.start - 5, pos.end - 5)
+        )
+        rewrite(select, to = false)
+      } else if (!scrutineeSubSelector && !inMatch) {
+        ctx.warning(
+          s"this will always yield false since `${scrutinee.show}` is not a subclass of `${selector.show}` (will be optimized away)",
+          pos
+        )
+        rewrite(select, to = false)
+      } else if (scrutineeSubSelector && !inMatch) {
+        ctx.warning(
+          s"this will always yield true if the scrutinee is non-null, since `${scrutinee.show}` is a subclass of `${selector.show}` (will be optimized away)",
+          pos
+        )
+        rewrite(select, to = true)
+      } else /* if (scrutineeSubSelector && inMatch) */ rewrite(select, to = true)
+    }
+
+    /** Rewrites cases with unrelated types */
+    def handleFalseUnrelated(select: Select, scrutinee: Type, selector: Type, inMatch: Boolean) =
+      if (inMatch) {
+        ctx.error(
+          s"will never match since `${selector.show}` is not a subclass of `${scrutinee.show}`",
+          Position(select.pos.start - 5, select.pos.end - 5)
+        )
+        rewrite(select, to = false)
+      } else {
+        ctx.warning(
+          s"will always yield false since `${scrutinee.show}` is not a subclass of `${selector.show}`",
+          select.pos
+        )
+        rewrite(select, to = false)
+      }
+
+    /** Rewrites the select to a boolean if `to` is false or if the qualifier
+     *  is a value class.
+     *
+     *  If `to` is set to true and the qualifier is not a primitive, the
+     *  instanceOf is replaced by a null check, since in that case
+     *  `scrutinee.isInstanceOf[Selector]` is false iff `scrutinee eq null`.
+     */
+    def rewrite(tree: Select, to: Boolean): Tree =
+      if (!to || !tree.qualifier.tpe.widen.derivesFrom(defn.AnyRefAlias)) {
+        val literal = Literal(Constant(to))
+        // Preserve the qualifier's evaluation (side effects) when it is not pure.
+        if (!isPureExpr(tree.qualifier)) Block(List(tree.qualifier), literal)
+        else literal
+      } else
+        Apply(tree.qualifier.select(defn.Object_ne), List(Literal(Constant(null))))
+
+    /** Attempts to rewrite TypeApply to either `scrutinee ne null` or a
+     *  constant
+     */
+    def evalTypeApply(tree: TypeApply): Tree =
+      if (tree.symbol != defn.Any_isInstanceOf) tree
+      else tree.fun match {
+        case s: Select => {
+          // Classification happens on the erased types of scrutinee and selector.
+          val scrutinee = erasure(s.qualifier.tpe.widen)
+          val selector = erasure(tree.args.head.tpe.widen)
+
+          val scTrait = scrutinee.typeSymbol is Trait
+          val scClass =
+            scrutinee.typeSymbol.isClass &&
+            !(scrutinee.typeSymbol is Trait) &&
+            !(scrutinee.typeSymbol is Module)
+
+          val scClassNonFinal = scClass && !(scrutinee.typeSymbol is Final)
+          val scFinalClass = scClass && (scrutinee.typeSymbol is Final)
+
+          val selTrait = selector.typeSymbol is Trait
+          val selClass =
+            selector.typeSymbol.isClass &&
+            !(selector.typeSymbol is Trait) &&
+            !(selector.typeSymbol is Module)
+
+          val selClassNonFinal = selClass && !(selector.typeSymbol is Final)
+          val selFinalClass = selClass && (selector.typeSymbol is Final)
+
+          // Cases ---------------------------------
+          // Note: `Any` erases to `Object`, hence the ObjectType comparison below.
+          val valueClassesOrAny =
+            ValueClasses.isDerivedValueClass(scrutinee.typeSymbol) ||
+            ValueClasses.isDerivedValueClass(selector.typeSymbol) ||
+            scrutinee == defn.ObjectType
+
+          val knownStatically = scFinalClass
+
+          val falseIfUnrelated =
+            (scClassNonFinal && selClassNonFinal) ||
+            (scClassNonFinal && selFinalClass) ||
+            (scTrait && selFinalClass)
+
+          val happens =
+            (scClassNonFinal && selClassNonFinal) ||
+            (scTrait && selClassNonFinal) ||
+            (scTrait && selTrait)
+
+          // A case-bound qualifier indicates the isInstanceOf stems from a pattern match.
+          val inMatch = s.qualifier.symbol is Case
+
+          if (valueClassesOrAny) tree
+          else if (knownStatically)
+            handleStaticallyKnown(s, scrutinee, selector, inMatch, tree.pos)
+          else if (falseIfUnrelated && scrutinee <:< selector)
+            // scrutinee is a subtype of the selector, safe to rewrite
+            rewrite(s, to = true)
+          else if (falseIfUnrelated && !(selector <:< scrutinee))
+            // selector and scrutinee are unrelated
+            handleFalseUnrelated(s, scrutinee, selector, inMatch)
+          else if (happens) tree
+          else tree
+        }
+
+        case _ => tree
+      }
+
+    evalTypeApply(tree)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
new file mode 100644
index 000000000..19fb3dd0c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
@@ -0,0 +1,548 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import core.StdNames.nme
+import core.Names._
+import core.NameOps._
+import core.Phases._
+import ast.Trees._
+import SymUtils._
+import ExplicitOuter.outer
+import util.Attachment
+import util.NameTransformer
+import util.Positions._
+import collection.{ mutable, immutable }
+import collection.mutable.{ HashMap, HashSet, LinkedHashMap, LinkedHashSet, TreeSet }
+
+object LambdaLift {
+  /** Name-join string used when freshening names of lifted anonymous functions. */
+  private val NJ = NameTransformer.NAME_JOIN_STRING
+  /** Thrown by `markFree` when a symbol has no visible access path from an enclosure. */
+  private class NoPath extends Exception
+}
+
+/** This phase performs the necessary rewritings to eliminate classes and methods
+ *  nested in other methods. In detail:
+ *   1. It adds all free variables of local functions as additional parameters (proxies).
+ *   2. It rebinds references to free variables to the corresponding proxies,
+ *   3. It lifts all local functions and classes out as far as possible, but at least
+ *      to the enclosing class.
+ *   4. It stores free variables of non-trait classes as additional fields of the class.
+ *      The fields serve as proxies for methods in the class, which avoids the need
+ *      of passing additional parameters to these methods.
+ *
+ *  A particularly tricky case is local traits. These cannot store free variables
+ *  as field proxies, because LambdaLift runs after Mixin, so the fields cannot be
+ *  expanded anymore. Instead, methods of local traits get free variables of
+ *  the trait as additional proxy parameters. The difference between local classes
+ *  and local traits is illustrated by the two rewritings below.
+ *
+ *    def f(x: Int) = {        def f(x: Int) = new C(x).f2
+ *      class C {         ==>  class C(x$1: Int) {
+ *        def f2 = x             def f2 = x$1
+ *      }                      }
+ *      new C().f2
+ *    }
+ *
+ *    def f(x: Int) = {        def f(x: Int) = new C().f2(x)
+ *      trait T {         ==>  trait T
+ *        def f2 = x             def f2(x$1: Int) = x$1
+ *      }                      }
+ *      class C extends T      class C extends T
+ *      new C().f2
+ *    }
+ */
+class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisTransform =>
+  import LambdaLift._
+  import ast.tpd._
+
+  /** the following two members override abstract members in Transform */
+  val phaseName: String = "lambdaLift"
+  val treeTransform = new LambdaLifter
+
+  override def relaxedTyping = true
+
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Constructors])
+    // Constructors has to happen before LambdaLift because the lambda lift logic
+    // becomes simpler if it can assume that parameter accessors have already been
+    // converted to parameters in super calls. Without this it is very hard to get
+    // lambda lift for super calls right. Witness the implementation restrictions to
+    // this effect in scalac.
+
+  class LambdaLifter extends TreeTransform {
+    override def phase = thisTransform
+
+    private type SymSet = TreeSet[Symbol]
+
+    /** A map storing free variables of functions and classes */
+    private val free = new LinkedHashMap[Symbol, SymSet]
+
+    /** A map storing the free variable proxies of functions and classes.
+     *  For every function and class, this is a map from the free variables
+     *  of that function or class to the proxy symbols accessing them.
+     */
+    private val proxyMap = new LinkedHashMap[Symbol, Map[Symbol, Symbol]]
+
+    /** A hashtable storing calls between functions */
+    private val called = new LinkedHashMap[Symbol, SymSet]
+
+    /** Symbols that are called from an inner class. */
+    private val calledFromInner = new HashSet[Symbol]
+
+    /** A map from local methods and classes to the owners to which they will be lifted as members.
+     *  For methods and classes that do not have any dependencies this will be the enclosing package.
+     *  Symbols with packages as lifted owners will subsequently be represented as static
+     *  members of their toplevel class, unless their enclosing class was already static.
+     *  Note: During tree transform (which runs at phase LambdaLift + 1), liftedOwner
+     *  is also used to decide whether a method had a term owner before.
+     */
+    private val liftedOwner = new HashMap[Symbol, Symbol]
+
+    /** The outer parameter of a constructor */
+    private val outerParam = new HashMap[Symbol, Symbol]
+
+    /** Buffers for lifted out classes and methods, indexed by owner */
+    private val liftedDefs = new HashMap[Symbol, mutable.ListBuffer[Tree]]
+
+    /** A flag to indicate whether new free variables have been found */
+    private var changedFreeVars: Boolean = _
+
+    /** A flag to indicate whether lifted owners have changed */
+    private var changedLiftedOwner: Boolean = _
+
+    private val ord: Ordering[Symbol] = Ordering.by((_: Symbol).id) // Dotty deviation: Type annotation needed. TODO: figure out why
+    private def newSymSet = TreeSet.empty[Symbol](ord)
+
+    private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
+      f.getOrElseUpdate(sym, newSymSet)
+
+    /** The recorded free variables of `sym`, or `Nil` if none. */
+    def freeVars(sym: Symbol): List[Symbol] = free get sym match {
+      case Some(set) => set.toList
+      case None => Nil
+    }
+
+    /** The proxy symbol that gives `sym` access to its free variable `fv`. */
+    def proxyOf(sym: Symbol, fv: Symbol) = proxyMap.getOrElse(sym, Map.empty)(fv)
+
+    /** All proxies of `sym`, in the order of its free variables. */
+    def proxies(sym: Symbol): List[Symbol] = freeVars(sym).map(proxyOf(sym, _))
+
+    /** A symbol is local if it is owned by a term or a local trait,
+     *  or if it is a constructor of a local symbol.
+     */
+    def isLocal(sym: Symbol)(implicit ctx: Context): Boolean = {
+      val owner = sym.maybeOwner
+      owner.isTerm ||
+      owner.is(Trait) && isLocal(owner) ||
+      sym.isConstructor && isLocal(owner)
+    }
+
+    /** Set `liftedOwner(sym)` to `owner` if `owner` is more deeply nested
+     *  than the previous value of `liftedOwner(sym)`.
+     */
+    def narrowLiftedOwner(sym: Symbol, owner: Symbol)(implicit ctx: Context) =
+      if (sym.maybeOwner.isTerm &&
+          owner.isProperlyContainedIn(liftedOwner(sym)) &&
+          owner != sym) {
+        ctx.log(i"narrow lifted $sym to $owner")
+        changedLiftedOwner = true
+        liftedOwner(sym) = owner
+      }
+
+    /** Mark symbol `sym` as being free in `enclosure`, unless `sym` is defined
+     *  in `enclosure` or there is an intermediate class properly containing `enclosure`
+     *  in which `sym` is also free. Also, update `liftedOwner` of `enclosure` so
+     *  that `enclosure` can access `sym`, or its proxy in an intermediate class.
+     *  This means:
+     *
+     *    1. If there is an intermediate class in which `sym` is free, `enclosure`
+     *       must be contained in that class (in order to access the `sym` proxy stored
+     *       in the class).
+     *
+     *    2. If there is no intermediate class, `enclosure` must be contained
+     *       in the class enclosing `sym`.
+     *
+     *  @return  If there is a non-trait class between `enclosure` and
+     *           the owner of `sym`, the largest such class.
+     *           Otherwise, if there is a trait between `enclosure` and
+     *           the owner of `sym`, the largest such trait.
+     *           Otherwise, NoSymbol.
+     *
+     *  @pre sym.owner.isTerm, (enclosure.isMethod || enclosure.isClass)
+     *
+     *  The idea of `markFree` is illustrated with an example:
+     *
+     *  def f(x: int) = {
+     *    class C {
+     *      class D {
+     *        val y = x
+     *      }
+     *    }
+     *  }
+     *
+     *  In this case `x` is free in the primary constructor of class `C`.
+     *  but it is not free in `D`, because after lambda lift the code would be transformed
+     *  as follows:
+     *
+     *  def f(x$0: int) {
+     *    class C(x$0: int) {
+     *      val x$1 = x$0
+     *      class D {
+     *        val y = outer.x$1
+     *      }
+     *    }
+     *  }
+     */
+    private def markFree(sym: Symbol, enclosure: Symbol)(implicit ctx: Context): Symbol = try {
+      if (!enclosure.exists) throw new NoPath
+      if (enclosure == sym.enclosure) NoSymbol
+      else {
+        ctx.debuglog(i"mark free: ${sym.showLocated} with owner ${sym.maybeOwner} marked free in $enclosure")
+        val intermediate =
+          if (enclosure.is(PackageClass)) enclosure
+          else markFree(sym, enclosure.enclosure)
+        narrowLiftedOwner(enclosure, intermediate orElse sym.enclosingClass)
+        if (!intermediate.isRealClass || enclosure.isConstructor) {
+          // Constructors and methods nested inside traits get the free variables
+          // of the enclosing trait or class.
+          // Conversely, local traits do not get free variables.
+          if (!enclosure.is(Trait))
+            if (symSet(free, enclosure).add(sym)) {
+              changedFreeVars = true
+              ctx.log(i"$sym is free in $enclosure")
+            }
+        }
+        if (intermediate.isRealClass) intermediate
+        else if (enclosure.isRealClass) enclosure
+        else if (intermediate.isClass) intermediate
+        else if (enclosure.isClass) enclosure
+        else NoSymbol
+      }
+    } catch {
+      case ex: NoPath =>
+        println(i"error lambda lifting ${ctx.compilationUnit}: $sym is not visible from $enclosure")
+        throw ex
+    }
+
+    /** Record a call edge from `caller` to local method `callee`. */
+    private def markCalled(callee: Symbol, caller: Symbol)(implicit ctx: Context): Unit = {
+      ctx.debuglog(i"mark called: $callee of ${callee.owner} is called by $caller in ${caller.owner}")
+      assert(isLocal(callee))
+      symSet(called, caller) += callee
+      if (callee.enclosingClass != caller.enclosingClass) calledFromInner += callee
+    }
+
+    /** Traverses the unit once, populating `free`, `called`, `liftedOwner`,
+     *  `outerParam` and `liftedDefs` for the fixed-point computations below.
+     */
+    private class CollectDependencies extends EnclosingMethodTraverser {
+      def traverse(enclosure: Symbol, tree: Tree)(implicit ctx: Context) = try { //debug
+        val sym = tree.symbol
+        def narrowTo(thisClass: ClassSymbol) = {
+          val enclClass = enclosure.enclosingClass
+          narrowLiftedOwner(enclosure,
+            if (enclClass.isContainedIn(thisClass)) thisClass
+            else enclClass) // unknown this reference, play it safe and assume the narrowest possible owner
+        }
+        tree match {
+          case tree: Ident =>
+            if (isLocal(sym)) {
+              if (sym is Label)
+                assert(enclosure == sym.enclosure,
+                  i"attempt to refer to label $sym from nested $enclosure")
+              else if (sym is Method) markCalled(sym, enclosure)
+              else if (sym.isTerm) markFree(sym, enclosure)
+            }
+            // A TermRef prefix chain ending in a non-static ThisType is an
+            // implicit reference to an enclosing `this`; narrow accordingly.
+            def captureImplicitThis(x: Type): Unit = {
+              x match {
+                case tr@TermRef(x, _) if (!tr.termSymbol.isStatic) => captureImplicitThis(x)
+                case x: ThisType if (!x.tref.typeSymbol.isStaticOwner) => narrowTo(x.tref.typeSymbol.asClass)
+                case _ =>
+              }
+            }
+            captureImplicitThis(tree.tpe)
+          case tree: Select =>
+            if (sym.is(Method) && isLocal(sym)) markCalled(sym, enclosure)
+          case tree: This =>
+            narrowTo(tree.symbol.asClass)
+          case tree: DefDef =>
+            if (sym.owner.isTerm && !sym.is(Label))
+              liftedOwner(sym) = sym.enclosingPackageClass
+              // this will make methods in supercall constructors of top-level classes owned
+              // by the enclosing package, which means they will be static.
+              // On the other hand, all other methods will be indirectly owned by their
+              // top-level class. This avoids possible deadlocks when a static method
+              // has to access its enclosing object from the outside.
+            else if (sym.isConstructor) {
+              if (sym.isPrimaryConstructor && isLocal(sym.owner) && !sym.owner.is(Trait))
+                // add a call edge from the constructor of a local non-trait class to
+                // the class itself. This is done so that the constructor inherits
+                // the free variables of the class.
+                symSet(called, sym) += sym.owner
+
+              tree.vparamss.head.find(_.name == nme.OUTER) match {
+                case Some(vdef) => outerParam(sym) = vdef.symbol
+                case _ =>
+              }
+            }
+          case tree: TypeDef =>
+            if (sym.owner.isTerm) liftedOwner(sym) = sym.topLevelClass.owner
+          case tree: Template =>
+            liftedDefs(tree.symbol.owner) = new mutable.ListBuffer
+          case _ =>
+        }
+        foldOver(enclosure, tree)
+      } catch { //debug
+        case ex: Exception =>
+          println(i"$ex while traversing $tree")
+          throw ex
+      }
+    }
+
+    /** Compute the final free variables map `free` by closing over caller dependencies. */
+    private def computeFreeVars()(implicit ctx: Context): Unit =
+      do {
+        changedFreeVars = false
+        for {
+          caller <- called.keys
+          callee <- called(caller)
+          fvs <- free get callee
+          fv <- fvs
+        } markFree(fv, caller)
+      } while (changedFreeVars)
+
+    /** Compute final liftedOwner map by closing over caller dependencies */
+    private def computeLiftedOwners()(implicit ctx: Context): Unit =
+      do {
+        changedLiftedOwner = false
+        for {
+          caller <- called.keys
+          callee <- called(caller)
+        } {
+          val normalizedCallee = callee.skipConstructor
+          val calleeOwner = normalizedCallee.owner
+          if (calleeOwner.isTerm) narrowLiftedOwner(caller, liftedOwner(normalizedCallee))
+          else {
+            assert(calleeOwner.is(Trait))
+            // methods nested inside local trait methods cannot be lifted out
+            // beyond the trait. Note that we can also call a trait method through
+            // a qualifier; in that case no restriction to lifted owner arises.
+            if (caller.isContainedIn(calleeOwner))
+              narrowLiftedOwner(caller, calleeOwner)
+          }
+        }
+      } while (changedLiftedOwner)
+
+    /** A fresh name for a lifted member; anonymous functions owned by a method
+     *  additionally get that method's name joined in.
+     */
+    private def newName(sym: Symbol)(implicit ctx: Context): Name =
+      if (sym.isAnonymousFunction && sym.owner.is(Method, butNot = Label))
+        (sym.name ++ NJ ++ sym.owner.name).freshened
+      else sym.name.freshened
+
+    /** Create a proxy symbol for every recorded free variable — a private
+     *  param accessor for class owners, a parameter for method owners — and
+     *  record them in `proxyMap`.
+     */
+    private def generateProxies()(implicit ctx: Context): Unit =
+      for ((owner, freeValues) <- free.toIterator) {
+        val newFlags = Synthetic | (if (owner.isClass) ParamAccessor | Private else Param)
+        ctx.debuglog(i"free var proxy: ${owner.showLocated}, ${freeValues.toList}%, %")
+        proxyMap(owner) = {
+          for (fv <- freeValues.toList) yield {
+            val proxyName = newName(fv)
+            val proxy = ctx.newSymbol(owner, proxyName.asTermName, newFlags, fv.info, coord = fv.coord)
+            if (owner.isClass) proxy.enteredAfter(thisTransform)
+            (fv, proxy)
+          }
+        }.toMap
+      }
+
+    /** `local`'s method type with its proxies prepended as extra parameters. */
+    private def liftedInfo(local: Symbol)(implicit ctx: Context): Type = local.info match {
+      case mt @ MethodType(pnames, ptypes) =>
+        val ps = proxies(local)
+        MethodType(
+          ps.map(_.name.asTermName) ++ pnames,
+          ps.map(_.info) ++ ptypes,
+          mt.resultType)
+      case info => info
+    }
+
+    /** Install new denotations (owner, name, flags, info) for all lifted
+     *  symbols, and new infos for unlifted symbols that gained proxies.
+     */
+    private def liftLocals()(implicit ctx: Context): Unit = {
+      for ((local, lOwner) <- liftedOwner) {
+        val (newOwner, maybeStatic) =
+          if (lOwner is Package) {
+            val encClass = local.enclosingClass
+            val topClass = local.topLevelClass
+            val preferEncClass =
+              encClass.isStatic &&
+                // non-static classes can capture owners, so should be avoided
+              (encClass.isProperlyContainedIn(topClass) ||
+                // can be false for symbols which are defined in some weird combination of supercalls.
+               encClass.is(ModuleClass, butNot = Package)
+                // needed to not cause deadlocks in classloader. see t5375.scala
+              )
+            if (preferEncClass) (encClass, EmptyFlags)
+            else (topClass, JavaStatic)
+          }
+          else (lOwner, EmptyFlags)
+        local.copySymDenotation(
+          owner = newOwner,
+          name = newName(local),
+          initFlags = local.flags &~ (InSuperCall | Module) | Private | maybeStatic,
+            // drop Module because class is no longer a singleton in the lifted context.
+          info = liftedInfo(local)).installAfter(thisTransform)
+      }
+      for (local <- free.keys)
+        if (!liftedOwner.contains(local))
+          local.copySymDenotation(info = liftedInfo(local)).installAfter(thisTransform)
+    }
+
+    /** Run the whole analysis for the current unit: collect dependencies,
+     *  close over free variables and lifted owners, then create proxies and
+     *  install the lifted denotations.
+     */
+    private def init(implicit ctx: Context) = {
+      (new CollectDependencies).traverse(NoSymbol, ctx.compilationUnit.tpdTree)
+      computeFreeVars()
+      computeLiftedOwners()
+      generateProxies()(ctx.withPhase(thisTransform.next))
+      liftLocals()(ctx.withPhase(thisTransform.next))
+    }
+
+    // A fresh, fully initialized lifter is created per compilation unit.
+    override def prepareForUnit(tree: Tree)(implicit ctx: Context) = {
+      val lifter = new LambdaLifter
+      lifter.init(ctx.withPhase(thisTransform))
+      lifter
+    }
+
+    private def currentEnclosure(implicit ctx: Context) =
+      ctx.owner.enclosingMethodOrClass
+
+    private def inCurrentOwner(sym: Symbol)(implicit ctx: Context) =
+      sym.enclosure == currentEnclosure
+
+    /** The proxy standing for free variable `sym` at the current owner, found
+     *  by walking outwards through the (lifted) enclosures.
+     */
+    private def proxy(sym: Symbol)(implicit ctx: Context): Symbol = {
+      def liftedEnclosure(sym: Symbol) = liftedOwner.getOrElse(sym, sym.enclosure)
+      def searchIn(enclosure: Symbol): Symbol = {
+        if (!enclosure.exists) {
+          def enclosures(encl: Symbol): List[Symbol] =
+            if (encl.exists) encl :: enclosures(liftedEnclosure(encl)) else Nil
+          throw new IllegalArgumentException(i"Could not find proxy for ${sym.showDcl} in ${sym.ownersIterator.toList}, encl = $currentEnclosure, owners = ${currentEnclosure.ownersIterator.toList}%, %; enclosures = ${enclosures(currentEnclosure)}%, %")
+        }
+        ctx.debuglog(i"searching for $sym(${sym.owner}) in $enclosure")
+        proxyMap get enclosure match {
+          case Some(pmap) =>
+            pmap get sym match {
+              case Some(proxy) => return proxy
+              case none =>
+            }
+          case none =>
+        }
+        searchIn(liftedEnclosure(enclosure))
+      }
+      if (inCurrentOwner(sym)) sym else searchIn(currentEnclosure)
+    }
+
+    /** A reference to `sym` as a member of its enclosing class, selected
+     *  through the outer path (via the constructor's outer parameter if we
+     *  are inside a constructor).
+     */
+    private def memberRef(sym: Symbol)(implicit ctx: Context, info: TransformerInfo): Tree = {
+      val clazz = sym.enclosingClass
+      val qual =
+        if (clazz.isStaticOwner || ctx.owner.enclosingClass == clazz)
+          singleton(clazz.thisType)
+        else if (ctx.owner.isConstructor)
+          outerParam.get(ctx.owner) match {
+            case Some(param) => outer.path(clazz, Ident(param.termRef))
+            case _ => outer.path(clazz)
+          }
+        else outer.path(clazz)
+      transformFollowingDeep(qual.select(sym))
+    }
+
+    /** A reference to the proxy of free variable `sym` at the current owner. */
+    private def proxyRef(sym: Symbol)(implicit ctx: Context, info: TransformerInfo): Tree = {
+      val psym = proxy(sym)(ctx.withPhase(thisTransform))
+      transformFollowingDeep(if (psym.owner.isTerm) ref(psym) else memberRef(psym))
+    }
+
+    /** Prepend proxy references for `sym`'s free variables to `args`. */
+    private def addFreeArgs(sym: Symbol, args: List[Tree])(implicit ctx: Context, info: TransformerInfo) =
+      free get sym match {
+        case Some(fvs) => fvs.toList.map(proxyRef(_)) ++ args
+        case _ => args
+      }
+
+    /** Add definitions for the given proxies to `tree`: as extra parameters
+     *  for a DefDef, as extra statements for a Template. For a primary
+     *  constructor of a non-trait class, also initialize the field proxies
+     *  from the parameter proxies.
+     */
+    private def addFreeParams(tree: Tree, proxies: List[Symbol])(implicit ctx: Context, info: TransformerInfo): Tree = proxies match {
+      case Nil => tree
+      case proxies =>
+        val sym = tree.symbol
+        val freeParamDefs = proxies.map(proxy =>
+          transformFollowingDeep(ValDef(proxy.asTerm).withPos(tree.pos)).asInstanceOf[ValDef])
+        def proxyInit(field: Symbol, param: Symbol) =
+          transformFollowingDeep(memberRef(field).becomes(ref(param)))
+
+        /** Initialize proxy fields from proxy parameters and map `rhs` from fields to parameters */
+        def copyParams(rhs: Tree) = {
+          val fvs = freeVars(sym.owner)
+          val classProxies = fvs.map(proxyOf(sym.owner, _))
+          val constrProxies = fvs.map(proxyOf(sym, _))
+          ctx.debuglog(i"copy params ${constrProxies.map(_.showLocated)}%, % to ${classProxies.map(_.showLocated)}%, %}")
+          seq((classProxies, constrProxies).zipped.map(proxyInit), rhs)
+        }
+
+        tree match {
+          case tree: DefDef =>
+            cpy.DefDef(tree)(
+              vparamss = tree.vparamss.map(freeParamDefs ++ _),
+              rhs =
+                if (sym.isPrimaryConstructor && !sym.owner.is(Trait)) copyParams(tree.rhs)
+                else tree.rhs)
+          case tree: Template =>
+            cpy.Template(tree)(body = freeParamDefs ++ tree.body)
+        }
+    }
+
+    /** Move a definition that needs lifting into its new owner's buffer;
+     *  the original site yields EmptyTree.
+     */
+    private def liftDef(tree: MemberDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+      val buf = liftedDefs(tree.symbol.owner)
+      transformFollowing(rename(tree, tree.symbol.name)).foreachInThicket(buf += _)
+      EmptyTree
+    }
+
+    private def needsLifting(sym: Symbol) = liftedOwner contains sym
+
+    // Rebind prefix-less references: free variables become proxy references,
+    // symbols lifted into a class become member references of that class.
+    override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) = {
+      val sym = tree.symbol
+      tree.tpe match {
+        case tpe @ TermRef(prefix, _) =>
+          if (prefix eq NoPrefix)
+            if (sym.enclosure != currentEnclosure && !sym.isStatic)
+              (if (sym is Method) memberRef(sym) else proxyRef(sym)).withPos(tree.pos)
+            else if (sym.owner.isClass) // sym was lifted out
+              ref(sym).withPos(tree.pos)
+            else
+              tree
+          else if (!prefixIsElidable(tpe)) ref(tpe)
+          else tree
+        case _ =>
+          tree
+      }
+    }
+
+    override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) =
+      cpy.Apply(tree)(tree.fun, addFreeArgs(tree.symbol, tree.args)).withPos(tree.pos)
+
+    override def transformClosure(tree: Closure)(implicit ctx: Context, info: TransformerInfo) =
+      cpy.Closure(tree)(env = addFreeArgs(tree.meth.symbol, tree.env))
+
+    override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
+      val sym = tree.symbol
+      val paramsAdded =
+        if (free.contains(sym)) addFreeParams(tree, proxies(sym)).asInstanceOf[DefDef]
+        else tree
+      if (needsLifting(sym)) liftDef(paramsAdded)
+      else paramsAdded
+    }
+
+    // Push a Return inside a Block so that the block's statements stay in place.
+    override def transformReturn(tree: Return)(implicit ctx: Context, info: TransformerInfo) = tree.expr match {
+      case Block(stats, value) =>
+        Block(stats, Return(value, tree.from)).withPos(tree.pos)
+      case _ =>
+        tree
+    }
+
+    override def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo) = {
+      val cls = ctx.owner
+      val impl = addFreeParams(tree, proxies(cls)).asInstanceOf[Template]
+      // liftedDefs(cls) was created when CollectDependencies traversed this template,
+      // so the `.get` below is safe for templates seen during the traversal.
+      cpy.Template(impl)(body = impl.body ++ liftedDefs.remove(cls).get)
+    }
+
+    override def transformTypeDef(tree: TypeDef)(implicit ctx: Context, info: TransformerInfo) =
+      if (needsLifting(tree.symbol)) liftDef(tree) else tree
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
new file mode 100644
index 000000000..e63a7c3a7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
@@ -0,0 +1,418 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.core.Annotations.Annotation
+import dotty.tools.dotc.core.Phases.NeedsCompanions
+
+import scala.collection.mutable
+import core._
+import Contexts._
+import Symbols._
+import Decorators._
+import NameOps._
+import StdNames.nme
+import rewrite.Rewrites.patch
+import util.Positions.Position
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransformer, MiniPhaseTransform}
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.{untpd, tpd}
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Types.{ExprType, NoType, MethodType}
+import dotty.tools.dotc.core.Names.Name
+import SymUtils._
+import scala.collection.mutable.ListBuffer
+import dotty.tools.dotc.core.Denotations.SingleDenotation
+import dotty.tools.dotc.core.SymDenotations.SymDenotation
+import dotty.tools.dotc.core.DenotTransformers.{SymTransformer, IdentityDenotTransformer, DenotTransformer}
+import Erasure.Boxing.adaptToType
+
+/** Implements `lazy val`s by rewriting each one into a container field plus
+ * an accessor method. Three schemes are used, chosen in `transformLazyVal`:
+ * - local lazy vals: a `dotty.runtime.Lazy*` holder object
+ * (`transformLocalDef`);
+ * - non-volatile class members: a flag field + container field with a
+ * non-thread-safe accessor, or a null-check when the type is a non-null
+ * reference (`transformMemberDefNonVolatile`);
+ * - volatile members and user-defined module vals: a CAS-based thread-safe
+ * accessor driven by field offsets obtained from `dotty.runtime.LazyVals`
+ * (`transformMemberDefVolatile` / `mkThreadSafeDef`).
+ */
+class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer {
+ import LazyVals._
+
+ import tpd._
+
+ def transformer = new LazyVals
+
+ // Flags given to the generated container/holder fields and init methods.
+ val containerFlags = Flags.Synthetic | Flags.Mutable | Flags.Lazy
+ val initFlags = Flags.Synthetic | Flags.Method
+
+ // Flags stripped from the original symbol when deriving the container's flags.
+ val containerFlagsMask = Flags.Method | Flags.Lazy | Flags.Accessor | Flags.Module
+
+ /** this map contains mutable state of transformation: OffsetDefs to be appended to companion object definitions,
+ * and number of bits currently used */
+ class OffsetInfo(var defs: List[Tree], var ord:Int)
+ val appendOffsetDefs = mutable.Map.empty[Symbol, OffsetInfo]
+
+ override def phaseName: String = "LazyVals"
+
+ /** List of names of phases that should have finished processing of tree
+ * before this phase starts processing same tree */
+ override def runsAfter = Set(classOf[Mixin])
+
+ override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree =
+ transformLazyVal(tree)
+
+
+ override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ transformLazyVal(tree)
+ }
+
+ /** Dispatch a lazy definition to the appropriate rewriting scheme.
+ * Non-lazy symbols, trait members, and static modules pass through
+ * unchanged. */
+ def transformLazyVal(tree: ValOrDefDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val sym = tree.symbol
+ if (!(sym is Flags.Lazy) || sym.owner.is(Flags.Trait) || (sym.isStatic && sym.is(Flags.Module))) tree
+ else {
+ val isField = sym.owner.isClass
+ if (isField) {
+ if (sym.isVolatile ||
+ (sym.is(Flags.Module)/* || ctx.scala2Mode*/) &&
+ // TODO assume @volatile once LazyVals uses static helper constructs instead of
+ // ones in the companion object.
+ !sym.is(Flags.Synthetic))
+ // module class is user-defined.
+ // Should be threadsafe, to mimic safety guaranteed by global object
+ transformMemberDefVolatile(tree)
+ else if (sym.is(Flags.Module)) // synthetic module
+ transformSyntheticModule(tree)
+ else
+ transformMemberDefNonVolatile(tree)
+ }
+ else transformLocalDef(tree)
+ }
+ }
+
+
+ /** Append offset fields to companion objects
+ */
+ override def transformTemplate(template: tpd.Template)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ val cls = ctx.owner.asClass
+
+ appendOffsetDefs.get(cls) match {
+ case None => template
+ case Some(data) =>
+ // Offset fields are marked @static so they become JVM static fields.
+ data.defs.foreach(_.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot)))
+ cpy.Template(template)(body = addInFront(data.defs, template.body))
+ }
+
+ }
+
+ /** Insert `prefix` at the start of `stats`, but keep a leading super
+ * constructor call first. */
+ private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match {
+ case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest
+ case _ => prefix ::: stats
+ }
+
+ /** Make an eager val that would implement synthetic module.
+ * Eager val ensures thread safety and has less code generated.
+ *
+ */
+ def transformSyntheticModule(tree: ValOrDefDef)(implicit ctx: Context) = {
+ val sym = tree.symbol
+ val holderSymbol = ctx.newSymbol(sym.owner, sym.asTerm.name.lazyLocalName,
+ Flags.Synthetic, sym.info.widen.resultType).enteredAfter(this)
+ val field = ValDef(holderSymbol, tree.rhs.changeOwnerAfter(sym, holderSymbol, this))
+ val getter = DefDef(sym.asTerm, ref(holderSymbol))
+ Thicket(field, getter)
+ }
+
+ /** Replace a local lazy val inside a method,
+ * with a LazyHolder from
+ * dotty.runtime(eg dotty.runtime.LazyInt)
+ */
+ def transformLocalDef(x: ValOrDefDef)(implicit ctx: Context) = {
+ val valueInitter = x.rhs
+ val holderName = ctx.freshName(x.name.asTermName.lazyLocalName).toTermName
+ val initName = ctx.freshName(x.name ++ StdNames.nme.LAZY_LOCAL_INIT).toTermName
+ val tpe = x.tpe.widen.resultType.widen
+
+ // Pick the primitive-specialized holder when possible; LazyRef otherwise.
+ val holderType =
+ if (tpe isRef defn.IntClass) "LazyInt"
+ else if (tpe isRef defn.LongClass) "LazyLong"
+ else if (tpe isRef defn.BooleanClass) "LazyBoolean"
+ else if (tpe isRef defn.FloatClass) "LazyFloat"
+ else if (tpe isRef defn.DoubleClass) "LazyDouble"
+ else if (tpe isRef defn.ByteClass) "LazyByte"
+ else if (tpe isRef defn.CharClass) "LazyChar"
+ else if (tpe isRef defn.ShortClass) "LazyShort"
+ else "LazyRef"
+
+
+ val holderImpl = ctx.requiredClass("dotty.runtime." + holderType)
+
+ val holderSymbol = ctx.newSymbol(x.symbol.owner, holderName, containerFlags, holderImpl.typeRef, coord = x.pos)
+ val initSymbol = ctx.newSymbol(x.symbol.owner, initName, initFlags, MethodType(Nil, tpe), coord = x.pos)
+ val result = ref(holderSymbol).select(lazyNme.value)
+ val flag = ref(holderSymbol).select(lazyNme.initialized)
+ val initer = valueInitter.changeOwner(x.symbol, initSymbol)
+ // The initializer runs under `holder.synchronized` so concurrent callers
+ // of the same local lazy val do not race on the holder.
+ val initBody =
+ adaptToType(
+ ref(holderSymbol).select(defn.Object_synchronized).appliedTo(
+ adaptToType(mkNonThreadSafeDef(result, flag, initer), defn.ObjectType)),
+ tpe)
+ val initTree = DefDef(initSymbol, initBody)
+ val holderTree = ValDef(holderSymbol, New(holderImpl.typeRef, List()))
+ val methodBody = tpd.If(flag.ensureApplied,
+ result.ensureApplied,
+ ref(initSymbol).ensureApplied).ensureConforms(tpe)
+
+ val methodTree = DefDef(x.symbol.asTerm, methodBody)
+ ctx.debuglog(s"found a lazy val ${x.show},\n rewrote with ${holderTree.show}")
+ Thicket(holderTree, initTree, methodTree)
+ }
+
+
+ override def transformStats(trees: List[tpd.Tree])(implicit ctx: Context, info: TransformerInfo): List[tpd.Tree] = {
+ // backend requires field usage to be after field definition
+ // need to bring containers to start of method
+ val (holders, stats) =
+ atGroupEnd { implicit ctx: Context =>
+ trees.partition {
+ _.symbol.flags.&~(Flags.Touched) == containerFlags
+ // Filtering out Flags.Touched is not required currently, as there are no LazyTypes involved here
+ // but just to be more safe
+ }
+ }
+ holders:::stats
+ }
+
+ /** Create non-threadsafe lazy accessor equivalent to such code
+ * def methodSymbol() = {
+ * if (flag) target
+ * else {
+ * target = rhs
+ * flag = true
+ * target
+ * }
+ * }
+ */
+
+ def mkNonThreadSafeDef(target: Tree, flag: Tree, rhs: Tree)(implicit ctx: Context) = {
+ val setFlag = flag.becomes(Literal(Constants.Constant(true)))
+ // A wildcard rhs (`_`) means "default value": no assignment is emitted.
+ val setTargets = if (isWildcardArg(rhs)) Nil else target.becomes(rhs) :: Nil
+ val init = Block(setFlag :: setTargets, target.ensureApplied)
+ If(flag.ensureApplied, target.ensureApplied, init)
+ }
+
+ /** Create non-threadsafe lazy accessor for not-nullable types equivalent to such code
+ * def methodSymbol() = {
+ * if (target eq null) {
+ * target = rhs
+ * target
+ * } else target
+ * }
+ */
+ def mkDefNonThreadSafeNonNullable(target: Symbol, rhs: Tree)(implicit ctx: Context) = {
+ val cond = ref(target).select(nme.eq).appliedTo(Literal(Constant(null)))
+ val exp = ref(target)
+ val setTarget = exp.becomes(rhs)
+ val init = Block(List(setTarget), exp)
+ If(cond, init, exp)
+ }
+
+ /** Rewrite a non-volatile lazy member into a container field plus accessor.
+ * Non-null reference types use `null` itself as the "uninitialized" marker
+ * and need no separate flag field. */
+ def transformMemberDefNonVolatile(x: ValOrDefDef)(implicit ctx: Context) = {
+ val claz = x.symbol.owner.asClass
+ val tpe = x.tpe.widen.resultType.widen
+ assert(!(x.symbol is Flags.Mutable))
+ val containerName = ctx.freshName(x.name.asTermName.lazyLocalName).toTermName
+ val containerSymbol = ctx.newSymbol(claz, containerName,
+ x.symbol.flags &~ containerFlagsMask | containerFlags | Flags.Private,
+ tpe, coord = x.symbol.coord
+ ).enteredAfter(this)
+
+ val containerTree = ValDef(containerSymbol, defaultValue(tpe))
+ if (x.tpe.isNotNull && tpe <:< defn.ObjectType) { // can use 'null' value instead of flag
+ val slowPath = DefDef(x.symbol.asTerm, mkDefNonThreadSafeNonNullable(containerSymbol, x.rhs))
+ Thicket(containerTree, slowPath)
+ }
+ else {
+ val flagName = ctx.freshName(x.name ++ StdNames.nme.BITMAP_PREFIX).toTermName
+ val flagSymbol = ctx.newSymbol(x.symbol.owner, flagName, containerFlags | Flags.Private, defn.BooleanType).enteredAfter(this)
+ val flag = ValDef(flagSymbol, Literal(Constants.Constant(false)))
+ val slowPath = DefDef(x.symbol.asTerm, mkNonThreadSafeDef(ref(containerSymbol), ref(flagSymbol), x.rhs))
+ Thicket(containerTree, flag, slowPath)
+ }
+ }
+
+ /** Create a threadsafe lazy accessor equivalent to such code
+ *
+ * def methodSymbol(): Int = {
+ * val result: Int = 0
+ * val retry: Boolean = true
+ * var flag: Long = 0L
+ * while retry do {
+ * flag = dotty.runtime.LazyVals.get(this, $claz.$OFFSET)
+ * dotty.runtime.LazyVals.STATE(flag, 0) match {
+ * case 0 =>
+ * if dotty.runtime.LazyVals.CAS(this, $claz.$OFFSET, flag, 1, $ord) {
+ * try {result = rhs} catch {
+ * case x: Throwable =>
+ * dotty.runtime.LazyVals.setFlag(this, $claz.$OFFSET, 0, $ord)
+ * throw x
+ * }
+ * $target = result
+ * dotty.runtime.LazyVals.setFlag(this, $claz.$OFFSET, 3, $ord)
+ * retry = false
+ * }
+ * case 1 =>
+ * dotty.runtime.LazyVals.wait4Notification(this, $claz.$OFFSET, flag, $ord)
+ * case 2 =>
+ * dotty.runtime.LazyVals.wait4Notification(this, $claz.$OFFSET, flag, $ord)
+ * case 3 =>
+ * retry = false
+ * result = $target
+ * }
+ * }
+ * result
+ * }
+ */
+ def mkThreadSafeDef(methodSymbol: TermSymbol, claz: ClassSymbol, ord: Int, target: Symbol, rhs: Tree, tp: Types.Type, offset: Tree, getFlag: Tree, stateMask: Tree, casFlag: Tree, setFlagState: Tree, waitOnLock: Tree)(implicit ctx: Context) = {
+ // The four states of the 2-bit field slot, per the protocol sketched above.
+ val initState = Literal(Constants.Constant(0))
+ val computeState = Literal(Constants.Constant(1))
+ val notifyState = Literal(Constants.Constant(2))
+ val computedState = Literal(Constants.Constant(3))
+ val flagSymbol = ctx.newSymbol(methodSymbol, lazyNme.flag, containerFlags, defn.LongType)
+ val flagDef = ValDef(flagSymbol, Literal(Constant(0L)))
+
+ val thiz = This(claz)(ctx.fresh.setOwner(claz))
+
+ val resultSymbol = ctx.newSymbol(methodSymbol, lazyNme.result, containerFlags, tp)
+ val resultDef = ValDef(resultSymbol, defaultValue(tp))
+
+ val retrySymbol = ctx.newSymbol(methodSymbol, lazyNme.retry, containerFlags, defn.BooleanType)
+ val retryDef = ValDef(retrySymbol, Literal(Constants.Constant(true)))
+
+ val whileCond = ref(retrySymbol)
+
+ // State 0: try to win the CAS; on success run rhs, publish, mark computed.
+ // A throwing rhs resets the state to 0 and rethrows.
+ val compute = {
+ val handlerSymbol = ctx.newSymbol(methodSymbol, nme.ANON_FUN, Flags.Synthetic,
+ MethodType(List(nme.x_1), List(defn.ThrowableType), defn.IntType))
+ val caseSymbol = ctx.newSymbol(methodSymbol, nme.DEFAULT_EXCEPTION_NAME, Flags.Synthetic, defn.ThrowableType)
+ val triggerRetry = setFlagState.appliedTo(thiz, offset, initState, Literal(Constant(ord)))
+ val complete = setFlagState.appliedTo(thiz, offset, computedState, Literal(Constant(ord)))
+
+ val handler = CaseDef(Bind(caseSymbol, ref(caseSymbol)), EmptyTree,
+ Block(List(triggerRetry), Throw(ref(caseSymbol))
+ ))
+
+ val compute = ref(resultSymbol).becomes(rhs)
+ val tr = Try(compute, List(handler), EmptyTree)
+ val assign = ref(target).becomes(ref(resultSymbol))
+ val noRetry = ref(retrySymbol).becomes(Literal(Constants.Constant(false)))
+ val body = If(casFlag.appliedTo(thiz, offset, ref(flagSymbol), computeState, Literal(Constant(ord))),
+ Block(tr :: assign :: complete :: noRetry :: Nil, Literal(Constant(()))),
+ Literal(Constant(())))
+
+ CaseDef(initState, EmptyTree, body)
+ }
+
+ // States 1 and 2: another thread is computing/notifying; block until woken.
+ val waitFirst = {
+ val wait = waitOnLock.appliedTo(thiz, offset, ref(flagSymbol), Literal(Constant(ord)))
+ CaseDef(computeState, EmptyTree, wait)
+ }
+
+ val waitSecond = {
+ val wait = waitOnLock.appliedTo(thiz, offset, ref(flagSymbol), Literal(Constant(ord)))
+ CaseDef(notifyState, EmptyTree, wait)
+ }
+
+ // State 3: value is published; read it and stop retrying.
+ val computed = {
+ val noRetry = ref(retrySymbol).becomes(Literal(Constants.Constant(false)))
+ val result = ref(resultSymbol).becomes(ref(target))
+ val body = Block(noRetry :: result :: Nil, Literal(Constant(())))
+ CaseDef(computedState, EmptyTree, body)
+ }
+
+ val default = CaseDef(Underscore(defn.LongType), EmptyTree, Literal(Constant(())))
+
+ val cases = Match(stateMask.appliedTo(ref(flagSymbol), Literal(Constant(ord))),
+ List(compute, waitFirst, waitSecond, computed, default)) //todo: annotate with @switch
+
+ val whileBody = List(ref(flagSymbol).becomes(getFlag.appliedTo(thiz, offset)), cases)
+ val cycle = WhileDo(methodSymbol, whileCond, whileBody)
+ DefDef(methodSymbol, Block(resultDef :: retryDef :: flagDef :: cycle :: Nil, ref(resultSymbol)))
+ }
+
+ /** Rewrite a volatile (thread-safe) lazy member: allocate or reuse a 2-bit
+ * slot in a per-class bitmap, record the field-offset definition for the
+ * companion (see `appendOffsetDefs`), and emit container + CAS accessor. */
+ def transformMemberDefVolatile(x: ValOrDefDef)(implicit ctx: Context) = {
+ assert(!(x.symbol is Flags.Mutable))
+
+ val tpe = x.tpe.widen.resultType.widen
+ val claz = x.symbol.owner.asClass
+ val thizClass = Literal(Constant(claz.info))
+ val helperModule = ctx.requiredModule("dotty.runtime.LazyVals")
+ val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset)
+ var offsetSymbol: TermSymbol = null
+ var flag: Tree = EmptyTree
+ var ord = 0
+
+ def offsetName(id: Int) = (StdNames.nme.LAZY_FIELD_OFFSET + (if(x.symbol.owner.is(Flags.Module)) "_m_" else "") + id.toString).toTermName
+
+ // compute or create appropriate offsetSymol, bitmap and bits used by current ValDef
+ appendOffsetDefs.get(claz) match {
+ case Some(info) =>
+ val flagsPerLong = (64 / dotty.runtime.LazyVals.BITS_PER_LAZY_VAL).toInt
+ info.ord += 1
+ ord = info.ord % flagsPerLong
+ val id = info.ord / flagsPerLong
+ val offsetById = offsetName(id)
+ if (ord != 0) { // there are unused bits in already existing flag
+ offsetSymbol = claz.info.decl(offsetById)
+ .suchThat(sym => (sym is Flags.Synthetic) && sym.isTerm)
+ .symbol.asTerm
+ } else { // need to create a new flag
+ offsetSymbol = ctx.newSymbol(claz, offsetById, Flags.Synthetic, defn.LongType).enteredAfter(this)
+ offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))
+ val flagName = (StdNames.nme.BITMAP_PREFIX + id.toString).toTermName
+ val flagSymbol = ctx.newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this)
+ flag = ValDef(flagSymbol, Literal(Constants.Constant(0L)))
+ val offsetTree = ValDef(offsetSymbol, getOffset.appliedTo(thizClass, Literal(Constant(flagName.toString))))
+ info.defs = offsetTree :: info.defs
+ }
+
+ case None =>
+ // First volatile lazy val in this class: create bitmap 0 and its offset.
+ offsetSymbol = ctx.newSymbol(claz, offsetName(0), Flags.Synthetic, defn.LongType).enteredAfter(this)
+ offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))
+ val flagName = (StdNames.nme.BITMAP_PREFIX + "0").toTermName
+ val flagSymbol = ctx.newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this)
+ flag = ValDef(flagSymbol, Literal(Constants.Constant(0L)))
+ val offsetTree = ValDef(offsetSymbol, getOffset.appliedTo(thizClass, Literal(Constant(flagName.toString))))
+ appendOffsetDefs += (claz -> new OffsetInfo(List(offsetTree), ord))
+ }
+
+ val containerName = ctx.freshName(x.name.asTermName.lazyLocalName).toTermName
+ val containerSymbol = ctx.newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this)
+
+ val containerTree = ValDef(containerSymbol, defaultValue(tpe))
+
+ val offset = ref(offsetSymbol)
+ val getFlag = Select(ref(helperModule), lazyNme.RLazyVals.get)
+ val setFlag = Select(ref(helperModule), lazyNme.RLazyVals.setFlag)
+ val wait = Select(ref(helperModule), lazyNme.RLazyVals.wait4Notification)
+ val state = Select(ref(helperModule), lazyNme.RLazyVals.state)
+ val cas = Select(ref(helperModule), lazyNme.RLazyVals.cas)
+
+ val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait)
+ if (flag eq EmptyTree)
+ Thicket(containerTree, accessor)
+ else Thicket(containerTree, flag, accessor)
+ }
+}
+
+/** Name constants used by the LazyVals phase. */
+object LazyVals {
+ object lazyNme {
+ /** Names of the runtime helper methods on `dotty.runtime.LazyVals`,
+ * mirrored from that object's own `Names`. */
+ object RLazyVals {
+ import dotty.runtime.LazyVals._
+ val get = Names.get.toTermName
+ val setFlag = Names.setFlag.toTermName
+ val wait4Notification = Names.wait4Notification.toTermName
+ val state = Names.state.toTermName
+ val cas = Names.cas.toTermName
+ val getOffset = Names.getOffset.toTermName
+ }
+ // Local variable names used inside the generated accessors.
+ val flag = "flag".toTermName
+ val result = "result".toTermName
+ val value = "value".toTermName
+ val initialized = "initialized".toTermName
+ val retry = "retry".toTermName
+ }
+}
+
+
+
diff --git a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
new file mode 100644
index 000000000..6a273b91e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
@@ -0,0 +1,66 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import NonLocalReturns._
+
+/** Lifts try's that might be executed on non-empty expression stacks
+ * to their own methods. I.e.
+ *
+ * try body catch handler
+ *
+ * is lifted to
+ *
+ * { def liftedTree$n() = try body catch handler; liftedTree$n() }
+ */
+class LiftTry extends MiniPhase with IdentityDenotTransformer { thisTransform =>
+ import ast.tpd._
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "liftTry"
+
+ // Two pre-built transformer instances; `prepareFor*` hooks switch between
+ // them to track whether the current position may have a non-empty
+ // expression stack (in which case a `try` must be lifted).
+ val treeTransform = new Transform(needLift = false)
+ val liftingTransform = new Transform(needLift = true)
+
+ class Transform(needLift: Boolean) extends TreeTransform {
+ def phase = thisTransform
+
+ // Label applications are jumps, not real calls: no stack to worry about.
+ override def prepareForApply(tree: Apply)(implicit ctx: Context) =
+ if (tree.fun.symbol.is(Label)) this
+ else liftingTransform
+
+ // Assigning into an outer method's variable goes through a non-empty stack.
+ override def prepareForValDef(tree: ValDef)(implicit ctx: Context) =
+ if (!tree.symbol.exists ||
+ tree.symbol.isSelfSym ||
+ tree.symbol.owner == ctx.owner.enclosingMethod) this
+ else liftingTransform
+
+ override def prepareForAssign(tree: Assign)(implicit ctx: Context) =
+ if (tree.lhs.symbol.maybeOwner == ctx.owner.enclosingMethod) this
+ else liftingTransform
+
+ override def prepareForReturn(tree: Return)(implicit ctx: Context) =
+ if (!isNonLocalReturn(tree)) this
+ else liftingTransform
+
+ // Entering a template resets to the non-lifting state.
+ override def prepareForTemplate(tree: Template)(implicit ctx: Context) =
+ treeTransform
+
+ /** Wrap the `try` in a fresh zero-argument method and call it, so the
+ * handler runs on an empty expression stack. */
+ override def transformTry(tree: Try)(implicit ctx: Context, info: TransformerInfo): Tree =
+ if (needLift) {
+ ctx.debuglog(i"lifting tree at ${tree.pos}, current owner = ${ctx.owner}")
+ val fn = ctx.newSymbol(
+ ctx.owner, ctx.freshName("liftedTree").toTermName, Synthetic | Method,
+ MethodType(Nil, tree.tpe), coord = tree.pos)
+ tree.changeOwnerAfter(ctx.owner, fn, thisTransform)
+ Block(DefDef(fn, tree) :: Nil, ref(fn).appliedToNone)
+ }
+ else tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/LinkScala2ImplClasses.scala b/compiler/src/dotty/tools/dotc/transform/LinkScala2ImplClasses.scala
new file mode 100644
index 000000000..ca06938dc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/LinkScala2ImplClasses.scala
@@ -0,0 +1,62 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import Contexts.Context
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import Phases._
+import ast.untpd
+import ast.Trees._
+import collection.mutable
+
+/** Rewrite calls
+ *
+ * super[M].f(args)
+ *
+ * where M is a Scala2 trait implemented by the current class to
+ *
+ * M$class.f(this, args)
+ *
+ * provided the implementation class M$class defines a corresponding function `f`.
+ */
+class LinkScala2ImplClasses extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "linkScala2ImplClasses"
+
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Mixin])
+
+ /** Rewrite `super[M].f(args)` (M a Scala 2.x trait mixed into the current
+ * class) to the static-style call `M$class.f(this, args)` when the impl
+ * class defines a matching `f`; otherwise leave the call alone. */
+ override def transformApply(app: Apply)(implicit ctx: Context, info: TransformerInfo) = {
+ def currentClass = ctx.owner.enclosingClass.asClass
+ app match {
+ case Apply(sel @ Select(Super(_, _), _), args)
+ if sel.symbol.owner.is(Scala2xTrait) && currentClass.mixins.contains(sel.symbol.owner) =>
+ val impl = implMethod(sel.symbol)
+ if (impl.exists) Apply(ref(impl), This(currentClass) :: args).withPos(app.pos)
+ else app // could have been an abstract method in a trait linked to from a super constructor
+ case _ =>
+ app
+ }
+ }
+
+ /** Find the method in M's implementation class corresponding to trait
+ * method `meth`, matching by name and full-parameterization signature. */
+ private def implMethod(meth: Symbol)(implicit ctx: Context): Symbol = {
+ val implInfo = meth.owner.implClass.info
+ if (meth.isConstructor)
+ implInfo.decl(nme.TRAIT_CONSTRUCTOR).symbol
+ else
+ implInfo.decl(meth.name)
+ .suchThat(c => FullParameterization.memberSignature(c.info) == meth.signature)
+ .symbol
+ }
+
+ // A trait compiled by Scala 2.x (has a separate M$class impl class).
+ private val Scala2xTrait = allOf(Scala2x, Trait)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled
new file mode 100644
index 000000000..f33baa52b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled
@@ -0,0 +1,95 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers._
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import core.StdNames.nme
+import ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Constants._
+
+/** This phase rewrites idempotent expressions with constant types to Literals.
+ * The constant types are eliminated by erasure, so we need to keep
+ * the info about constantness in the trees.
+ *
+ * The phase also makes sure that the constant of a literal is the same as the constant
+ * in the type of the literal.
+ */
+class Literalize extends MiniPhaseTransform { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "literalize"
+
+ /** Note: Demanding idempotency instead of purity is strictly speaking too loose.
+ * Example
+ *
+ * object O { final val x = 42; println("43") }
+ * O.x
+ *
+ * Strictly speaking we can't replace `O.x` with `42`. But this would make
+ * most expressions non-constant. Maybe we can change the spec to accept this
+ * kind of eliding behavior. Or else enforce true purity in the compiler.
+ * The choice will be affected by what we will do with `inline` and with
+ * Singleton type bounds (see SIP 23). Presumably
+ *
+ * object O1 { val x: Singleton = 42; println("43") }
+ * object O2 { inline val x = 42; println("43") }
+ *
+ * should behave differently.
+ *
+ * O1.x should have the same effect as { println("43"; 42 }
+ *
+ * whereas
+ *
+ * O2.x = 42
+ *
+ * Revisit this issue once we have implemented `inline`. Then we can demand
+ * purity of the prefix unless the selection goes to an inline val.
+ */
+ def literalize(tree: Tree)(implicit ctx: Context): Tree = {
+ // Widen through stable term refs until a ConstantType (replace with a
+ // Literal if the tree is idempotent) or a non-constant type (keep tree).
+ def recur(tp: Type): Tree = tp match {
+ case ConstantType(value) if isIdempotentExpr(tree) => Literal(value)
+ case tp: TermRef if tp.symbol.isStable => recur(tp.info.widenExpr)
+ case _ => tree
+ }
+ recur(tree.tpe)
+ }
+
+ override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo): Tree =
+ literalize(tree)
+
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree =
+ literalize(tree)
+
+ override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ literalize(tree)
+
+ override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ literalize(tree)
+
+ /** Re-sync a Literal whose embedded constant disagrees (in value or tag)
+ * with the constant in its type. */
+ override def transformLiteral(tree: Literal)(implicit ctx: Context, info: TransformerInfo): Tree = tree.tpe match {
+ case ConstantType(const) if tree.const.value != const.value || (tree.const.tag != const.tag) => Literal(const)
+ case _ => tree
+ }
+
+ /** Check that all literals have types match underlying constants
+ */
+ override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
+ tree match {
+ case Literal(c @ Constant(treeValue)) =>
+ tree.tpe match {
+ case ConstantType(c2 @ Constant(typeValue)) =>
+ assert(treeValue == typeValue && c2.tag == c.tag,
+ i"Type of Literal $tree is inconsistent with underlying constant")
+ case tpe =>
+ assert(c.tpe =:= tpe, i"Type of Literal $tree is inconsistent with underlying constant type ${c.tpe}")
+ }
+ case _ =>
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
new file mode 100644
index 000000000..9634decaa
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
@@ -0,0 +1,70 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import typer._
+import Phases._
+import ast.Trees._
+import Contexts._
+import Symbols._
+import Flags.PackageVal
+import Decorators._
+
+/** A base class for transforms.
+ * A transform contains a compiler phase which applies a tree transformer.
+ */
+abstract class MacroTransform extends Phase {
+
+ import ast.tpd._
+
+ override def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ unit.tpdTree = newTransformer.transform(unit.tpdTree)(ctx.withPhase(transformPhase))
+ }
+
+ /** The tree transformer this phase applies; supplied by subclasses. */
+ protected def newTransformer(implicit ctx: Context): Transformer
+
+ /** The phase in which the transformation should be run.
+ * By default this is the phase given by the this macro transformer,
+ * but it could be overridden to be the phase following that one.
+ */
+ protected def transformPhase(implicit ctx: Context): Phase = this
+
+ class Transformer extends TreeMap {
+
+ /** A context with `tree` as its tree and the tree's symbol as owner
+ * (the module class for package objects). */
+ protected def localCtx(tree: Tree)(implicit ctx: Context) = {
+ val sym = tree.symbol
+ val owner = if (sym is PackageVal) sym.moduleClass else sym
+ ctx.fresh.setTree(tree).setOwner(owner)
+ }
+
+ /** Transform statements; non-definition statements get an expression
+ * context owned by `exprOwner`, and Thickets are flattened. */
+ def transformStats(trees: List[Tree], exprOwner: Symbol)(implicit ctx: Context): List[Tree] = {
+ def transformStat(stat: Tree): Tree = stat match {
+ case _: Import | _: DefTree => transform(stat)
+ case Thicket(stats) => cpy.Thicket(stat)(stats mapConserve transformStat)
+ case _ => transform(stat)(ctx.exprContext(stat, exprOwner))
+ }
+ flatten(trees.mapconserve(transformStat(_)))
+ }
+
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = {
+ tree match {
+ case EmptyValDef =>
+ tree
+ case _: PackageDef | _: MemberDef =>
+ // Definitions are transformed in a context owned by themselves.
+ super.transform(tree)(localCtx(tree))
+ case impl @ Template(constr, parents, self, _) =>
+ // Parents are transformed in a super-call context.
+ cpy.Template(tree)(
+ transformSub(constr),
+ transform(parents)(ctx.superCallContext),
+ transformSelf(self),
+ transformStats(impl.body, tree.symbol))
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+ def transformSelf(vd: ValDef)(implicit ctx: Context) =
+ cpy.ValDef(vd)(tpt = transform(vd.tpt))
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
new file mode 100644
index 000000000..01c240e3a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
@@ -0,0 +1,129 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers._
+import Phases.Phase
+import Contexts.Context
+import SymDenotations.SymDenotation
+import Types._
+import Symbols._
+import SymUtils._
+import Constants._
+import ast.Trees._
+import TreeTransforms._
+import NameOps._
+import Flags._
+import Decorators._
+
+/** Provides the implementations of all getters and setters, introducing
+ * fields to hold the value accessed by them.
+ * TODO: Make LazyVals a part of this phase?
+ *
+ * <accessor> <stable> <mods> def x(): T = e
+ * --> private val x: T = e
+ * <accessor> <stable> <mods> def x(): T = x
+ *
+ * <accessor> <mods> def x(): T = e
+ * --> private var x: T = e
+ * <accessor> <mods> def x(): T = x
+ *
+ * <accessor> <mods> def x_=(y: T): Unit = ()
+ * --> <accessor> <mods> def x_=(y: T): Unit = x = y
+ */
+ class Memoize extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName = "memoize"
+
+ /* Makes sure that, after getters and constructors gen, there doesn't
+ * exist non-deferred definitions that are not implemented. */
+ override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
+ def errorLackImplementation(t: Tree) = {
+ val firstPhaseId = t.symbol.initial.validFor.firstPhaseId
+ val definingPhase = ctx.withPhase(firstPhaseId).phase.prev
+ throw new AssertionError(
+ i"Non-deferred definition introduced by $definingPhase lacks implementation: $t")
+ }
+ tree match {
+ case ddef: DefDef
+ if !ddef.symbol.is(Deferred) && ddef.rhs == EmptyTree =>
+ errorLackImplementation(ddef)
+ case tdef: TypeDef
+ if tdef.symbol.isClass && !tdef.symbol.is(Deferred) && tdef.rhs == EmptyTree =>
+ errorLackImplementation(tdef)
+ case _ =>
+ }
+ super.checkPostCondition(tree)
+ }
+
+ /** Should run after mixin so that fields get generated in the
+ * class that contains the concrete getter rather than the trait
+ * that defines it.
+ */
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Mixin])
+
+ /** Give accessor defs a concrete implementation backed by a field:
+ * getters read the field, setters assign it. Final-val getters with a
+ * constant rhs skip the field entirely. */
+ override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val sym = tree.symbol
+
+ // Lazily create the backing field; mutable unless the accessor is Stable.
+ def newField = {
+ val fieldType =
+ if (sym.isGetter) sym.info.resultType
+ else /*sym.isSetter*/ sym.info.firstParamTypes.head
+
+ ctx.newSymbol(
+ owner = ctx.owner,
+ name = sym.name.asTermName.fieldName,
+ flags = Private | (if (sym is Stable) EmptyFlags else Mutable),
+ info = fieldType,
+ coord = tree.pos)
+ .withAnnotationsCarrying(sym, defn.FieldMetaAnnot)
+ .enteredAfter(thisTransform)
+ }
+
+ /** Can be used to filter annotations on getters and setters; not used yet */
+ def keepAnnotations(denot: SymDenotation, meta: ClassSymbol) = {
+ val cpy = sym.copySymDenotation()
+ cpy.filterAnnotations(_.symbol.derivesFrom(meta))
+ if (cpy.annotations ne denot.annotations) cpy.installAfter(thisTransform)
+ }
+
+ lazy val field = sym.field.orElse(newField).asTerm
+
+ def adaptToField(tree: Tree) =
+ if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen)
+
+ if (sym.is(Accessor, butNot = NoFieldNeeded))
+ if (sym.isGetter) {
+ def skipBlocks(t: Tree): Tree = t match {
+ case Block(_, t1) => skipBlocks(t1)
+ case _ => t
+ }
+ skipBlocks(tree.rhs) match {
+ case lit: Literal if sym.is(Final) && isIdempotentExpr(tree.rhs) =>
+ // duplicating scalac behavior: for final vals that have rhs as constant, we do not create a field
+ // and instead return the value. This seemingly minor optimization has huge effect on initialization
+ // order and the values that can be observed during superconstructor call
+
+ // see remark about idempotency in PostTyper#normalizeTree
+ cpy.DefDef(tree)(rhs = lit)
+ case _ =>
+ var rhs = tree.rhs.changeOwnerAfter(sym, field, thisTransform)
+ if (isWildcardArg(rhs)) rhs = EmptyTree
+
+ val fieldDef = transformFollowing(ValDef(field, adaptToField(rhs)))
+ val getterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(ref(field))(ctx.withOwner(sym), info))
+ Thicket(fieldDef, getterDef)
+ }
+ } else if (sym.isSetter) {
+ if (!sym.is(ParamAccessor)) { val Literal(Constant(())) = tree.rhs } // this is intended as an assertion
+ field.setFlag(Mutable) // necessary for vals mixed in from Scala2 traits
+ val initializer = Assign(ref(field), adaptToField(ref(tree.vparamss.head.head.symbol)))
+ cpy.DefDef(tree)(rhs = transformFollowingDeep(initializer)(ctx.withOwner(sym), info))
+ }
+ else tree // curiously, some accessors from Scala2 have ' ' suffixes. They count as
+ // neither getters nor setters
+ else tree
+ }
+ // Accessors with any of these flags never get a backing field here.
+ private val NoFieldNeeded = Lazy | Deferred | JavaDefined
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala
new file mode 100644
index 000000000..27cfc835a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala
@@ -0,0 +1,257 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import Contexts.Context
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import Phases._
+import ast.untpd
+import ast.Trees._
+import collection.mutable
+
+/** This phase performs the following transformations:
+ *
+ * 1. (done in `traitDefs` and `transformSym`) Map every concrete trait getter
+ *
+ * <mods> def x(): T = expr
+ *
+ * to the pair of definitions:
+ *
+ * <mods> def x(): T
+ * protected def initial$x(): T = { stats; expr }
+ *
+ *       where `stats` comprises all statements that are not definitions (i.e. that are
+ *       executed for their side effects) occurring between the previous field definition
+ *       (or the start of the trait, if there is none) and this getter.
+ *
+ * 2. (done in `traitDefs`) Make every concrete trait setter
+ *
+ * <mods> def x_=(y: T) = ()
+ *
+ * deferred by mapping it to
+ *
+ * <mods> def x_=(y: T)
+ *
+ * 3. For a non-trait class C:
+ *
+ * For every trait M directly implemented by the class (see SymUtils.mixin), in
+ * reverse linearization order, add the following definitions to C:
+ *
+ * 3.1 (done in `traitInits`) For every parameter accessor `<mods> def x(): T` in M,
+ * in order of textual occurrence, add
+ *
+ * <mods> def x() = e
+ *
+ * where `e` is the constructor argument in C that corresponds to `x`. Issue
+ * an error if no such argument exists.
+ *
+ * 3.2 (done in `traitInits`) For every concrete trait getter `<mods> def x(): T` in M
+ * which is not a parameter accessor, in order of textual occurrence, produce the following:
+ *
+ * 3.2.1 If `x` is also a member of `C`, and M is a Dotty trait:
+ *
+ * <mods> def x(): T = super[M].initial$x()
+ *
+ * 3.2.2 If `x` is also a member of `C`, and M is a Scala 2.x trait:
+ *
+ * <mods> def x(): T = _
+ *
+ * 3.2.3 If `x` is not a member of `C`, and M is a Dotty trait:
+ *
+ * super[M].initial$x()
+ *
+ *    3.2.4 If `x` is not a member of `C`, and M is a Scala 2.x trait, nothing gets added.
+ *
+ *
+ * 3.3 (done in `superCallOpt`) The call:
+ *
+ * super[M].<init>
+ *
+ * 3.4 (done in `setters`) For every concrete setter `<mods> def x_=(y: T)` in M:
+ *
+ * <mods> def x_=(y: T) = ()
+ *
+ * 4. (done in `transformTemplate` and `transformSym`) Drop all parameters from trait
+ * constructors.
+ *
+ * 5. (done in `transformSym`) Drop ParamAccessor flag from all parameter accessors in traits.
+ *
+ * Conceptually, this is the second half of the previous mixin phase. It needs to run
+ * after erasure because it copies references to possibly private inner classes and objects
+ * into enclosing classes where they are not visible. This can only be done if all references
+ * are symbolic.
+ */
+class Mixin extends MiniPhaseTransform with SymTransformer { thisTransform =>
+  import ast.tpd._
+
+  override def phaseName: String = "mixin"
+
+  // Must run after erasure: see the class comment — copied references must be symbolic.
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure])
+
+  // Symbol-level half of the transformation:
+  //  - concrete, non-lazy trait accessors become Deferred and lose ParamAccessor
+  //    (steps 1, 2, 5 of the class comment); they are also made non-private so the
+  //    implementing class can override them
+  //  - trait constructors are renamed to TRAIT_CONSTRUCTOR and lose all parameters (step 4)
+  override def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation =
+    if (sym.is(Accessor, butNot = Deferred) && sym.owner.is(Trait)) {
+      val sym1 =
+        if (sym is Lazy) sym
+        else sym.copySymDenotation(initFlags = sym.flags &~ ParamAccessor | Deferred)
+      sym1.ensureNotPrivate
+    }
+    else if (sym.isConstructor && sym.owner.is(Trait))
+      sym.copySymDenotation(
+        name = nme.TRAIT_CONSTRUCTOR,
+        info = MethodType(Nil, sym.info.resultType))
+    else
+      sym
+
+  /** The `initial$x` method symbol for getter `sym`, creating and entering it
+   *  (after this phase) if it does not exist yet. Lazy vals keep their own symbol.
+   */
+  private def initializer(sym: Symbol)(implicit ctx: Context): TermSymbol = {
+    if (sym is Lazy) sym
+    else {
+      val initName = InitializerName(sym.name.asTermName)
+      sym.owner.info.decl(initName).symbol
+        .orElse(
+          ctx.newSymbol(
+            sym.owner,
+            initName,
+            Protected | Synthetic | Method,
+            sym.info,
+            coord = sym.symbol.coord).enteredAfter(thisTransform))
+    }
+  }.asTerm
+
+  override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo) = {
+    val cls = impl.symbol.owner.asClass
+    val ops = new MixinOps(cls, thisTransform)
+    import ops._
+
+    // Steps 1 and 2 (trait bodies): move each concrete getter's rhs — together with
+    // all preceding side-effecting statements accumulated in `initBuf` — into its
+    // `initial$x` method, and strip the rhs of concrete setters.
+    def traitDefs(stats: List[Tree]): List[Tree] = {
+      val initBuf = new mutable.ListBuffer[Tree]
+      stats.flatMap({
+        case stat: DefDef if stat.symbol.isGetter && !stat.rhs.isEmpty && !stat.symbol.is(Flags.Lazy) =>
+          // make initializer that has all effects of previous getter,
+          // replace getter rhs with empty tree.
+          val vsym = stat.symbol
+          val isym = initializer(vsym)
+          val rhs = Block(
+            initBuf.toList.map(_.changeOwnerAfter(impl.symbol, isym, thisTransform)),
+            stat.rhs.changeOwnerAfter(vsym, isym, thisTransform).wildcardToDefault)
+          initBuf.clear()
+          cpy.DefDef(stat)(rhs = EmptyTree) :: DefDef(isym, rhs) :: Nil
+        case stat: DefDef if stat.symbol.isSetter =>
+          cpy.DefDef(stat)(rhs = EmptyTree) :: Nil
+        case stat: DefTree =>
+          stat :: Nil
+        case stat =>
+          // Non-definition statement: defer it into the next getter's initializer.
+          initBuf += stat
+          Nil
+      }) ++ initBuf // trailing statements with no following getter are kept at the end
+    }
+
+    /** Map constructor call to a pair of a supercall and a list of arguments
+     *  to be used as initializers of trait parameters if the target of the call
+     *  is a trait.
+     */
+    def transformConstructor(tree: Tree): (Tree, List[Tree]) = {
+      val Apply(sel @ Select(New(_), nme.CONSTRUCTOR), args) = tree
+      val (callArgs, initArgs) = if (tree.symbol.owner.is(Trait)) (Nil, args) else (args, Nil)
+      (superRef(tree.symbol, tree.pos).appliedToArgs(callArgs), initArgs)
+    }
+
+    // For every parent with a constructor call: its owner -> (super call, trait init args).
+    val superCallsAndArgs = (
+      for (p <- impl.parents if p.symbol.isConstructor)
+      yield p.symbol.owner -> transformConstructor(p)
+    ).toMap
+    val superCalls = superCallsAndArgs.mapValues(_._1)
+    val initArgs = superCallsAndArgs.mapValues(_._2)
+
+    // Step 3.3: the super-constructor call for `baseCls`, synthesized if absent;
+    // omitted for phantom classes and for traits known to have no initialization.
+    def superCallOpt(baseCls: Symbol): List[Tree] = superCalls.get(baseCls) match {
+      case Some(call) =>
+        if (defn.PhantomClasses.contains(baseCls)) Nil else call :: Nil
+      case None =>
+        if (baseCls.is(NoInitsTrait) || defn.PhantomClasses.contains(baseCls)) Nil
+        else {
+          //println(i"synth super call ${baseCls.primaryConstructor}: ${baseCls.primaryConstructor.info}")
+          transformFollowingDeep(superRef(baseCls.primaryConstructor).appliedToNone) :: Nil
+        }
+    }
+
+    // Did `sym` carry `flags` as seen at this phase (i.e. before later transforms)?
+    def was(sym: Symbol, flags: FlagSet) =
+      ctx.atPhase(thisTransform) { implicit ctx => sym is flags }
+
+    // Steps 3.1 and 3.2: implementations in `cls` for getters inherited from `mixin`.
+    def traitInits(mixin: ClassSymbol): List[Tree] = {
+      // Cursor into the constructor arguments recorded for `mixin`; parameter
+      // accessors are consumed in order of textual occurrence (step 3.1).
+      var argNum = 0
+      def nextArgument() = initArgs.get(mixin) match {
+        case Some(arguments) =>
+          val result = arguments(argNum)
+          argNum += 1
+          result
+        case None =>
+          assert(
+            impl.parents.forall(_.tpe.typeSymbol != mixin),
+            i"missing parameters for $mixin from $impl should have been caught in typer")
+          ctx.error(
+            em"""parameterized $mixin is indirectly implemented,
+                |needs to be implemented directly so that arguments can be passed""",
+            cls.pos)
+          EmptyTree
+      }
+
+      for (getter <- mixin.info.decls.toList if getter.isGetter && !was(getter, Deferred)) yield {
+        val isScala2x = mixin.is(Scala2x)
+        def default = Underscore(getter.info.resultType)
+        def initial = transformFollowing(superRef(initializer(getter)).appliedToNone)
+
+        /** A call to the implementation of `getter` in `mixin`'s implementation class */
+        def lazyGetterCall = {
+          def canbeImplClassGetter(sym: Symbol) = sym.info.firstParamTypes match {
+            case t :: Nil => t.isDirectRef(mixin)
+            case _ => false
+          }
+          val implClassGetter = mixin.implClass.info.nonPrivateDecl(getter.name)
+            .suchThat(canbeImplClassGetter).symbol
+          ref(mixin.implClass).select(implClassGetter).appliedTo(This(cls))
+        }
+
+        if (isCurrent(getter) || getter.is(ExpandedName)) {
+          // Cases 3.2.1 / 3.2.2: `getter` needs a concrete definition in `cls`.
+          val rhs =
+            if (was(getter, ParamAccessor)) nextArgument()
+            else if (isScala2x)
+              if (getter.is(Lazy, butNot = Module)) lazyGetterCall
+              else if (getter.is(Module))
+                New(getter.info.resultType, List(This(cls)))
+              else Underscore(getter.info.resultType)
+            else initial
+          // transformFollowing call is needed to make memoize & lazy vals run
+          transformFollowing(DefDef(implementation(getter.asTerm), rhs))
+        }
+        // Cases 3.2.3 / 3.2.4: no member needed; keep the initializer call for its
+        // side effects unless the trait comes from Scala 2.x or was a param accessor.
+        else if (isScala2x || was(getter, ParamAccessor)) EmptyTree
+        else initial
+      }
+    }
+
+    // Step 3.4: give every inherited concrete setter a `()` body in `cls`.
+    def setters(mixin: ClassSymbol): List[Tree] =
+      for (setter <- mixin.info.decls.filter(setr => setr.isSetter && !was(setr, Deferred)).toList)
+        yield transformFollowing(DefDef(implementation(setter.asTerm), unitLiteral.withPos(cls.pos)))
+
+    cpy.Template(impl)(
+      constr =
+        // Step 4: trait constructors lose their parameters.
+        if (cls.is(Trait)) cpy.DefDef(impl.constr)(vparamss = Nil :: Nil)
+        else impl.constr,
+      parents = impl.parents.map(p => TypeTree(p.tpe).withPos(p.pos)),
+      body =
+        if (cls is Trait) traitDefs(impl.body)
+        else {
+          // Mixins are processed in the order given by `ops.mixins`; each contributes
+          // its trait inits, its super call, and its setter stubs, in that order.
+          val mixInits = mixins.flatMap { mixin =>
+            flatten(traitInits(mixin)) ::: superCallOpt(mixin) ::: setters(mixin)
+          }
+          superCallOpt(superCls) ::: mixInits ::: impl.body
+        })
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala
new file mode 100644
index 000000000..6cebf7197
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala
@@ -0,0 +1,68 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Symbols._, Types._, Contexts._, SymDenotations._, DenotTransformers._, Flags._
+import util.Positions._
+import SymUtils._
+import StdNames._, NameOps._
+
+/** Helpers shared by the mixin-related transforms operating on class `cls`:
+ *  copying trait members into the implementing class and building `super` references.
+ */
+class MixinOps(cls: ClassSymbol, thisTransform: DenotTransformer)(implicit ctx: Context) {
+  import ast.tpd._
+
+  val superCls: Symbol = cls.superClass
+  val mixins: List[ClassSymbol] = cls.mixins
+
+  /** A copy of trait member `member` owned by `cls`: concrete (Deferred dropped),
+   *  with its info seen from `cls.thisType`, entered into `cls` after `thisTransform`.
+   *  Annotations are carried over from the original member.
+   */
+  def implementation(member: TermSymbol): TermSymbol = {
+    val res = member.copy(
+      owner = cls,
+      name = member.name.stripScala2LocalSuffix,
+      flags = member.flags &~ Deferred,
+      info = cls.thisType.memberInfo(member)).enteredAfter(thisTransform).asTerm
+    res.addAnnotations(member.annotations)
+    res
+  }
+
+  /** A `super` reference from `cls` to `target`: an unqualified `super` (in a
+   *  constructor-call position) for class constructors, a `super[Owner]` otherwise.
+   *  The selection is given a fixed-symbol type so it stays symbolic after erasure.
+   */
+  def superRef(target: Symbol, pos: Position = cls.pos): Tree = {
+    val sup = if (target.isConstructor && !target.owner.is(Trait))
+      Super(This(cls), tpnme.EMPTY, true)
+    else
+      Super(This(cls), target.owner.name.asTypeName, false, target.owner)
+    //println(i"super ref $target on $sup")
+    ast.untpd.Select(sup.withPos(pos), target.name)
+      .withType(NamedType.withFixedSym(sup.tpe, target))
+    //sup.select(target)
+  }
+
+  /** Is `sym` a member of implementing class `cls`?
+   *  The test is performed at phase `thisTransform`.
+   */
+  def isCurrent(sym: Symbol) =
+    ctx.atPhase(thisTransform) { implicit ctx =>
+      cls.info.member(sym.name).hasAltWith(_.symbol == sym)
+    }
+
+  /** Does `meth` need a forwarder in class `cls`?
+   *  A method needs a forwarder in these cases:
+   *   - there's a class defining a method with the same signature
+   *   - there are multiple traits defining a method with the same signature
+   *   - the method originates from a Scala 2.x trait
+   */
+  def needsForwarder(meth: Symbol): Boolean = {
+    lazy val competingMethods = cls.baseClasses.iterator
+      .filter(_ ne meth.owner)
+      .map(meth.overriddenSymbol)
+      .filter(_.exists)
+      .toList
+
+    def needsDisambiguation = competingMethods.exists(x=> !(x is Deferred)) // multiple implementations are available
+    def hasNonInterfaceDefinition = competingMethods.exists(!_.owner.is(Trait)) // there is a definition originating from class
+    meth.is(Method, butNot = PrivateOrAccessorOrDeferred) &&
+    isCurrent(meth) &&
+    (needsDisambiguation || hasNonInterfaceDefinition || meth.owner.is(Scala2x))
+  }
+
+  final val PrivateOrAccessorOrDeferred = Private | Accessor | Deferred
+
+  /** A curried right-hand-side builder that forwards type and value arguments
+   *  to `target` via a `super` call.
+   */
+  def forwarder(target: Symbol) = (targs: List[Type]) => (vrefss: List[List[Tree]]) =>
+    superRef(target).appliedToTypes(targs).appliedToArgss(vrefss)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
new file mode 100644
index 000000000..5c2cd3145
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
@@ -0,0 +1,77 @@
+package dotty.tools.dotc.transform
+
+import dotty.tools.dotc.ast.{Trees, tpd}
+import dotty.tools.dotc.core.Annotations.Annotation
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.DenotTransformers.{InfoTransformer, SymTransformer}
+import dotty.tools.dotc.core.SymDenotations.SymDenotation
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.NameOps._
+import dotty.tools.dotc.core.{Flags, Names}
+import dotty.tools.dotc.core.Names.Name
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Types.MethodType
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Move static methods from companion to the class itself */
+class MoveStatics extends MiniPhaseTransform with SymTransformer { thisTransformer =>
+
+  import tpd._
+  override def phaseName = "moveStatic"
+
+
+  /** Move every `@static`-annotated member of a module class into its companion
+   *  class: delete it from the module's scope, enter it into the companion, and
+   *  re-own its denotation. Non-method members additionally become Mutable.
+   */
+  def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation = {
+    if (sym.hasAnnotation(defn.ScalaStaticAnnot) && sym.owner.is(Flags.Module) && sym.owner.companionClass.exists) {
+      sym.owner.asClass.delete(sym.symbol)
+      sym.owner.companionClass.asClass.enter(sym.symbol)
+      val flags = if (sym.is(Flags.Method)) sym.flags else sym.flags | Flags.Mutable
+      sym.copySymDenotation(owner = sym.owner.companionClass, initFlags = flags)
+    }
+    else sym
+  }
+
+  override def transformStats(trees: List[Tree])(implicit ctx: Context, info: TransformerInfo): List[Tree] = {
+    // Only package-level statements are rewritten; nested scopes pass through.
+    if (ctx.owner.is(Flags.Package)) {
+      val (classes, others) = trees.partition(x => x.isInstanceOf[TypeDef] && x.symbol.isClass)
+      // Group a class with its companion object by the shared (module-suffix-stripped) name.
+      val pairs = classes.groupBy(_.symbol.name.stripModuleClassSuffix).asInstanceOf[Map[Name, List[TypeDef]]]
+
+      // Rebuild `orig` with body `newBody`, prepending a synthetic static
+      // constructor that runs the static field initializers if any are present.
+      // `orig == null` encodes "no companion on this side" (see `move`).
+      def rebuild(orig: TypeDef, newBody: List[Tree]): Tree = {
+        if (orig eq null) return EmptyTree
+
+        val staticFields = newBody.filter(x => x.isInstanceOf[ValDef] && x.symbol.hasAnnotation(defn.ScalaStaticAnnot)).asInstanceOf[List[ValDef]]
+        val newBodyWithStaticConstr =
+          if (staticFields.nonEmpty) {
+            /* do NOT put Flags.JavaStatic here. It breaks .enclosingClass */
+            val staticCostructor = ctx.newSymbol(orig.symbol, Names.STATIC_CONSTRUCTOR, Flags.Synthetic | Flags.Method | Flags.Private, MethodType(Nil, defn.UnitType))
+            staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot))
+            staticCostructor.entered
+
+            val staticAssigns = staticFields.map(x => Assign(ref(x.symbol), x.rhs.changeOwner(x.symbol, staticCostructor)))
+            tpd.DefDef(staticCostructor, Block(staticAssigns, tpd.unitLiteral)) :: newBody
+          } else newBody
+
+        val oldTemplate = orig.rhs.asInstanceOf[Template]
+        cpy.TypeDef(orig)(rhs = cpy.Template(orig.rhs)(oldTemplate.constr, oldTemplate.parents, oldTemplate.self, newBodyWithStaticConstr))
+      }
+
+      // Redistribute members between `module` and `companion` according to the
+      // ownership already fixed by `transformSym`. Order-insensitive: if called
+      // with the companion first, it swaps the arguments.
+      def move(module: TypeDef, companion: TypeDef): List[Tree] = {
+        if (!module.symbol.is(Flags.Module)) move(companion, module)
+        else {
+          val allMembers =
+            (if(companion ne null) {companion.rhs.asInstanceOf[Template].body} else Nil) ++
+            module.rhs.asInstanceOf[Template].body
+          val (newModuleBody, newCompanionBody) = allMembers.partition(x => {assert(x.symbol.exists); x.symbol.owner == module.symbol})
+          Trees.flatten(rebuild(companion, newCompanionBody) :: rebuild(module, newModuleBody) :: Nil)
+        }
+      }
+      val newPairs =
+        for ((name, classes) <- pairs)
+        yield
+          // Singleton group: a lone module still goes through `move` (with a null
+          // companion) so it gains a static constructor if needed.
+          if (classes.tail.isEmpty)
+            if (classes.head.symbol.is(Flags.Module)) move(classes.head, null)
+            else List(rebuild(classes.head, classes.head.rhs.asInstanceOf[Template].body))
+          else move(classes.head, classes.tail.head)
+      Trees.flatten(newPairs.toList.flatten ++ others)
+    } else trees
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
new file mode 100644
index 000000000..7680e283e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
@@ -0,0 +1,92 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._, Phases._
+import TreeTransforms._
+import ast.Trees._
+import collection.mutable
+
+object NonLocalReturns {
+  import ast.tpd._
+
+  /** Is `ret` a non-local return? True when the method returned from is not the
+   *  method lexically enclosing the return, or when the current owner is marked
+   *  Lazy (presumably because lazy initializers execute in their own method —
+   *  NOTE(review): confirm).
+   */
+  def isNonLocalReturn(ret: Return)(implicit ctx: Context) =
+    ret.from.symbol != ctx.owner.enclosingMethod || ctx.owner.is(Lazy)
+}
+
+/** Implement non-local returns using NonLocalReturnControl exceptions.
+ */
+class NonLocalReturns extends MiniPhaseTransform { thisTransformer =>
+  override def phaseName = "nonLocalReturns"
+
+  import NonLocalReturns._
+  import ast.tpd._
+
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[ElimByName])
+
+  // NOTE(review): this private helper appears unused — the call sites below use
+  // `tree.ensureConforms(...)` method syntax, which does not resolve to this
+  // two-argument method. Confirm whether it can be removed.
+  private def ensureConforms(tree: Tree, pt: Type)(implicit ctx: Context) =
+    if (tree.tpe <:< pt) tree
+    else Erasure.Boxing.adaptToType(tree, pt)
+
+  /** The type of a non-local return expression with given argument type */
+  private def nonLocalReturnExceptionType(argtype: Type)(implicit ctx: Context) =
+    defn.NonLocalReturnControlType.appliedTo(argtype)
+
+  /** A hashmap from method symbols to non-local return keys.
+   *  Entries are added in `transformReturn` and consumed (removed) in
+   *  `transformDefDef` once the enclosing method is rewritten.
+   */
+  private val nonLocalReturnKeys = mutable.Map[Symbol, TermSymbol]()
+
+  /** Return non-local return key for given method */
+  private def nonLocalReturnKey(meth: Symbol)(implicit ctx: Context) =
+    nonLocalReturnKeys.getOrElseUpdate(meth,
+      ctx.newSymbol(
+        meth, ctx.freshName("nonLocalReturnKey").toTermName, Synthetic, defn.ObjectType, coord = meth.pos))
+
+  /** Generate a non-local return throw with given return expression from given method.
+   *  I.e. for the method's non-local return key, generate:
+   *
+   *    throw new NonLocalReturnControl(key, expr)
+   *  todo: maybe clone a pre-existing exception instead?
+   *  (but what to do about exceptions that miss their targets?)
+   */
+  private def nonLocalReturnThrow(expr: Tree, meth: Symbol)(implicit ctx: Context) =
+    Throw(
+      New(
+        defn.NonLocalReturnControlType,
+        ref(nonLocalReturnKey(meth)) :: expr.ensureConforms(defn.ObjectType) :: Nil))
+
+  /** Transform (body, key) to:
+   *
+   *  {
+   *    val key = new Object()
+   *    try {
+   *      body
+   *    } catch {
+   *      case ex: NonLocalReturnControl =>
+   *        if (ex.key().eq(key)) ex.value().asInstanceOf[T]
+   *        else throw ex
+   *    }
+   *  }
+   */
+  private def nonLocalReturnTry(body: Tree, key: TermSymbol, meth: Symbol)(implicit ctx: Context) = {
+    val keyDef = ValDef(key, New(defn.ObjectType, Nil))
+    val nonLocalReturnControl = defn.NonLocalReturnControlType
+    val ex = ctx.newSymbol(meth, nme.ex, EmptyFlags, nonLocalReturnControl, coord = body.pos)
+    val pat = BindTyped(ex, nonLocalReturnControl)
+    val rhs = If(
+      // Reference identity on the key: only returns aimed at THIS invocation are caught.
+      ref(ex).select(nme.key).appliedToNone.select(nme.eq).appliedTo(ref(key)),
+      ref(ex).select(nme.value).ensureConforms(meth.info.finalResultType),
+      Throw(ref(ex)))
+    val catches = CaseDef(pat, EmptyTree, rhs) :: Nil
+    val tryCatch = Try(body, catches, EmptyTree)
+    Block(keyDef :: Nil, tryCatch)
+  }
+
+  // Wrap a method body in the try/catch above iff one of its returns was
+  // rewritten to a throw (i.e. a key was registered for this method).
+  override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree =
+    nonLocalReturnKeys.remove(tree.symbol) match {
+      case Some(key) => cpy.DefDef(tree)(rhs = nonLocalReturnTry(tree.rhs, key, tree.symbol))
+      case _ => tree
+    }
+
+  // Replace each non-local return with a NonLocalReturnControl throw.
+  override def transformReturn(tree: Return)(implicit ctx: Context, info: TransformerInfo): Tree =
+    if (isNonLocalReturn(tree)) nonLocalReturnThrow(tree.expr, tree.from.symbol).withPos(tree.pos)
+    else tree
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/NormalizeFlags.scala b/compiler/src/dotty/tools/dotc/transform/NormalizeFlags.scala
new file mode 100644
index 000000000..755846904
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/NormalizeFlags.scala
@@ -0,0 +1,25 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers.SymTransformer
+import Phases.Phase
+import Contexts.Context
+import SymDenotations.SymDenotation
+import TreeTransforms.MiniPhaseTransform
+import Flags._, Symbols._
+
+/** Widens all private[this] and protected[this] qualifiers to just private/protected,
+ *  by dropping the `Local` flag from every symbol denotation.
+ *
+ *  NOTE(review): this scaladoc previously also claimed the phase sets a PureInterface
+ *  flag on traits with only pure interface members; no such logic exists in this
+ *  phase — confirm whether that is implemented elsewhere or was never implemented.
+ */
+class NormalizeFlags extends MiniPhaseTransform with SymTransformer { thisTransformer =>
+  override def phaseName = "normalizeFlags"
+
+  /** Drop `Local` from `ref`'s flags. Returns `ref` itself when nothing changed,
+   *  avoiding the allocation of a fresh denotation in the common case.
+   */
+  def transformSym(ref: SymDenotation)(implicit ctx: Context) = {
+    val newFlags = ref.flags &~ Local // val, not var: never reassigned
+    if (newFlags != ref.flags) ref.copySymDenotation(initFlags = newFlags)
+    else ref
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
new file mode 100644
index 000000000..650a03054
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
@@ -0,0 +1,140 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Flags._, Symbols._, Contexts._, Types._, Scopes._, Decorators._
+import util.HashSet
+import collection.mutable
+import collection.immutable.BitSet
+import scala.annotation.tailrec
+
+/** A module that can produce a kind of iterator (`Cursor`),
+ * which yields all pairs of overriding/overridden symbols
+ * that are visible in some baseclass, unless there's a parent class
+ * that already contains the same pairs.
+ *
+ * Adapted from the 2.9 version of OverridingPairs. The 2.10 version is IMO
+ * way too unwieldy to be maintained.
+ */
+object OverridingPairs {
+
+  /** The cursor class
+   *  @param base   the base class that contains the overriding pairs
+   */
+  class Cursor(base: Symbol)(implicit ctx: Context) {
+
+    private val self = base.thisType
+
+    /** Symbols to exclude: Here these are constructors and private locals.
+     *  But it may be refined in subclasses.
+     */
+    protected def exclude(sym: Symbol): Boolean = !sym.memberCanMatchInheritedSymbols
+
+    /** The parents of base (may also be refined).
+     */
+    protected def parents: Array[Symbol] = base.info.parents.toArray map (_.typeSymbol)
+
+    /** Does `sym1` match `sym2` so that it qualifies as overriding?
+     *  Types always match. Term symbols match if their member types
+     *  relative to `<base>.this` do.
+     */
+    protected def matches(sym1: Symbol, sym2: Symbol): Boolean =
+      sym1.isType || self.memberInfo(sym1).matches(self.memberInfo(sym2))
+
+    /** The symbols that can take part in an overriding pair */
+    private val decls = {
+      val decls = newScope
+      // fill `decls` with overriding shadowing overridden
+      def fillDecls(bcs: List[Symbol], deferred: Boolean): Unit = bcs match {
+        case bc :: bcs1 =>
+          // recurse first so entries from classes later in the linearization
+          // are entered before (and thus shadowed by) more specific ones
+          fillDecls(bcs1, deferred)
+          var e = bc.info.decls.lastEntry
+          while (e != null) {
+            if (e.sym.is(Deferred) == deferred && !exclude(e.sym))
+              decls.enter(e.sym)
+            e = e.prev
+          }
+        case nil =>
+      }
+      // first, deferred (this will need to change if we change lookup rules!)
+      fillDecls(base.info.baseClasses, deferred = true)
+      // then, concrete.
+      fillDecls(base.info.baseClasses, deferred = false)
+      decls
+    }
+
+    // For each base class, the set of indices of direct parents that derive from it.
+    private val subParents = {
+      val subParents = new mutable.HashMap[Symbol, BitSet]
+      for (bc <- base.info.baseClasses)
+        subParents(bc) = BitSet(parents.indices.filter(parents(_).derivesFrom(bc)): _*)
+      subParents
+    }
+
+    // True if some direct parent already sees both classes, and hence the pair.
+    private def hasCommonParentAsSubclass(cls1: Symbol, cls2: Symbol): Boolean =
+      (subParents(cls1) intersect subParents(cls2)).nonEmpty
+
+    /** The scope entries that have already been visited as overridden
+     *  (maybe excluded because of hasCommonParentAsSubclass).
+     *  These will not appear as overriding.
+     */
+    private val visited = new mutable.HashSet[Symbol]
+
+    /** The current entry candidate for overriding
+     */
+    private var curEntry = decls.lastEntry
+
+    /** The current entry candidate for overridden */
+    private var nextEntry = curEntry
+
+    /** The current candidate symbol for overriding */
+    var overriding: Symbol = _
+
+    /** If not null: The symbol overridden by overriding */
+    var overridden: Symbol = _
+
+    //@M: note that next is called once during object initialization
+    final def hasNext: Boolean = nextEntry ne null
+
+    /** Advance `curEntry` to the next candidate that may override something else,
+     *  skipping symbols already seen as overridden.
+     *  @post
+     *    curEntry = the next candidate that may override something else
+     *    nextEntry = curEntry
+     *    overriding = curEntry.sym
+     */
+    private def nextOverriding(): Unit = {
+      @tailrec def loop(): Unit =
+        if (curEntry ne null) {
+          overriding = curEntry.sym
+          if (visited.contains(overriding)) {
+            curEntry = curEntry.prev
+            loop()
+          }
+        }
+      loop()
+      nextEntry = curEntry
+    }
+
+    /** Advance to the next overriding/overridden pair, if any.
+     *  @post
+     *    hasNext    = there is another overriding pair
+     *    overriding = overriding member of the pair, provided hasNext is true
+     *    overridden = overridden member of the pair, provided hasNext is true
+     */
+    @tailrec final def next(): Unit =
+      if (nextEntry ne null) {
+        nextEntry = decls.lookupNextEntry(nextEntry)
+        if (nextEntry ne null) {
+          overridden = nextEntry.sym
+          if (overriding.owner != overridden.owner && matches(overriding, overridden)) {
+            visited += overridden
+            // pair is reported only if no direct parent already contains it
+            if (!hasCommonParentAsSubclass(overriding.owner, overridden.owner)) return
+          }
+        } else {
+          // exhausted candidates for this `overriding`; move to the next one
+          curEntry = curEntry.prev
+          nextOverriding()
+        }
+        next()
+      }
+
+    // Position the cursor on the first pair (next() is called once at construction).
+    nextOverriding()
+    next()
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
new file mode 100644
index 000000000..9571c387b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
@@ -0,0 +1,94 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import ast.Trees._
+import Contexts._, Types._, Symbols._, Flags._, TypeUtils._, DenotTransformers._, StdNames._
+
+/** For all parameter accessors
+ *
+ * val x: T = ...
+ *
+ * if
+ * (1) x is forwarded in the supercall to a parameter that's also named `x`
+ * (2) the superclass parameter accessor for `x` is accessible from the current class
+ * change the accessor to
+ *
+ * def x: T = super.x.asInstanceOf[T]
+ *
+ * Do the same also if there are intermediate inaccessible parameter accessor forwarders.
+ * The aim of this transformation is to avoid redundant parameter accessor fields.
+ */
+class ParamForwarding(thisTransformer: DenotTransformer) {
+  import ast.tpd._
+
+  /** Rewrite eligible parameter accessors of `impl` into super-forwarding defs.
+   *  See the class comment for the eligibility conditions.
+   */
+  def forwardParamAccessors(impl: Template)(implicit ctx: Context): Template = {
+    def fwd(stats: List[Tree])(implicit ctx: Context): List[Tree] = {
+      // Arguments and parameter names of the first parent constructor call, if any.
+      val (superArgs, superParamNames) = impl.parents match {
+        case superCall @ Apply(fn, args) :: _ =>
+          fn.tpe.widen match {
+            case MethodType(paramNames, _) => (args, paramNames)
+            case _ => (Nil, Nil)
+          }
+        case _ => (Nil, Nil)
+      }
+      // The accessible superclass accessor that `sym` forwards to, following
+      // chains of already-forwarded (Method) accessors; NoSymbol if none.
+      def inheritedAccessor(sym: Symbol): Symbol = {
+        /**
+         * Dmitry: having it have the same name is needed to maintain correctness in presence of subclassing
+         * if you would use parent param-name `a` to implement param-field `b`
+         * overriding field `b` will actually override field `a`, that is wrong!
+         *
+         * class A(val s: Int);
+         * class B(val b: Int) extends A(b)
+         * class C extends A(2) {
+         *   def s = 3
+         *   assert(this.b == 2)
+         * }
+         */
+        val candidate = sym.owner.asClass.superClass
+          .info.decl(sym.name).suchThat(_ is (ParamAccessor, butNot = Mutable)).symbol
+        if (candidate.isAccessibleFrom(currentClass.thisType, superAccess = true)) candidate
+        else if (candidate is Method) inheritedAccessor(candidate)
+        else NoSymbol
+      }
+      def forwardParamAccessor(stat: Tree): Tree = {
+        stat match {
+          case stat: ValDef =>
+            val sym = stat.symbol.asTerm
+            if (sym is (ParamAccessor, butNot = Mutable)) {
+              val idx = superArgs.indexWhere(_.symbol == sym)
+              if (idx >= 0 && superParamNames(idx) == stat.name) { // supercall to like-named parameter
+                val alias = inheritedAccessor(sym)
+                if (alias.exists) {
+                  // Turn the field's denotation into a stable method and emit
+                  // `def x = super.x` in its place (the field itself disappears).
+                  def forwarder(implicit ctx: Context) = {
+                    sym.copySymDenotation(initFlags = sym.flags | Method | Stable, info = sym.info.ensureMethodic)
+                      .installAfter(thisTransformer)
+                    val superAcc =
+                      Super(This(currentClass), tpnme.EMPTY, inConstrCall = false).select(alias)
+                    DefDef(sym, superAcc.ensureConforms(sym.info.widen))
+                  }
+                  // Built at the phase after `thisTransformer`, where the new
+                  // methodic denotation is visible.
+                  return forwarder(ctx.withPhase(thisTransformer.next))
+                }
+              }
+            }
+          case _ =>
+        }
+        stat
+      }
+      stats map forwardParamAccessor
+    }
+
+    cpy.Template(impl)(body = fwd(impl.body)(ctx.withPhase(thisTransformer)))
+  }
+
+  /** Fix up a reference to a parameter accessor that became a method: give the
+   *  tree a TermRef carrying the (now methodic) signature.
+   */
+  def adaptRef[T <: RefTree](tree: T)(implicit ctx: Context): T = tree.tpe match {
+    case tpe: TermRefWithSignature
+      if tpe.sig == Signature.NotAMethod && tpe.symbol.is(Method) =>
+      // It's a param forwarder; adapt the signature
+      tree.withType(
+        TermRef.withSig(tpe.prefix, tpe.name, tpe.prefix.memberInfo(tpe.symbol).signature))
+        .asInstanceOf[T]
+    case _ =>
+      tree
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
new file mode 100644
index 000000000..3e25cf82e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -0,0 +1,1989 @@
+package dotty.tools.dotc
+package transform
+
+import scala.language.postfixOps
+
+import TreeTransforms._
+import core.Denotations._
+import core.SymDenotations._
+import core.Contexts._
+import core.Symbols._
+import core.Types._
+import core.Constants._
+import core.StdNames._
+import dotty.tools.dotc.ast.{untpd, TreeTypeMap, tpd}
+import dotty.tools.dotc.core
+import dotty.tools.dotc.core.DenotTransformers.DenotTransformer
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.core.{TypeApplications, Flags}
+import dotty.tools.dotc.typer.Applications
+import dotty.tools.dotc.util.Positions
+import typer.ErrorReporting._
+import ast.Trees._
+import Applications._
+import TypeApplications._
+import SymUtils._, core.NameOps._
+import core.Mode
+import patmat._
+
+import dotty.tools.dotc.util.Positions.Position
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Flags
+
+import scala.reflect.internal.util.Collections
+
+/** This transform eliminates patterns. Right now it's a dummy.
+ * Awaiting the real pattern matcher.
+ * elimRepeated is required
+ * TODO: outer tests are not generated yet.
+ */
+class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
+ import dotty.tools.dotc.ast.tpd._
+
+  // This phase rewrites trees only; denotations pass through unchanged.
+  override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
+
+  override def runsAfter = Set(classOf[ElimRepeated])
+
+  override def runsAfterGroupsOf = Set(classOf[TailRec]) // tailrec is not capable of reversing the patmat transformation made for a tree
+
+  override def phaseName = "patternMatcher"
+
+  private var _id = 0 // left for debugging
+
+  /** Translate `tree` into pattern-free code.
+   *
+   *  Also runs the space engine on the original match to report exhaustivity
+   *  and redundancy warnings where the match is checkable. The translated
+   *  tree is adapted back to the type of the original match, since
+   *  translation may yield a different (wider/narrower) type.
+   */
+  override def transformMatch(tree: Match)(implicit ctx: Context, info: TransformerInfo): Tree = {
+    val translated = new Translator()(ctx).translator.translateMatch(tree)
+
+    // check exhaustivity and unreachability
+    val engine = new SpaceEngine
+    if (engine.checkable(tree)) {
+      engine.checkExhaustivity(tree)
+      engine.checkRedundancy(tree)
+    }
+
+    translated.ensureConforms(tree.tpe)
+  }
+
+ class Translator(implicit ctx: Context) {
+
+    /** The translator instance used to rewrite a single `Match` tree. */
+    def translator = {
+      new OptimizingMatchTranslator/*(localTyper)*/
+    }
+
+    /** Combines match translation with the optimizing code generator. */
+    class OptimizingMatchTranslator extends MatchOptimizer/*(val typer: analyzer.Typer)*/ with MatchTranslator
+
+    /** Core services shared by all code generators: fresh symbols, synthetic
+     *  case labels, and the `AbsCodegen`/`Casegen` interfaces that describe
+     *  how extractor calls are combined into the translated match.
+     */
+    trait CodegenCore {
+      private var ctr = 0 // left for debugging
+
+      // assert(owner ne null); assert(owner ne NoSymbol)
+      def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x", owner: Symbol = ctx.owner) = {
+        ctr += 1
+        ctx.newSymbol(owner, ctx.freshName(prefix + ctr).toTermName, Flags.Synthetic | Flags.Case, tp, coord = pos)
+      }
+
+      // A synthetic label method; translated cases jump to such labels.
+      def newSynthCaseLabel(name: String, tpe: Type, owner: Symbol = ctx.owner) =
+        ctx.newSymbol(owner, ctx.freshName(name).toTermName, Flags.Label | Flags.Synthetic | Flags.Method, tpe).asTerm
+      //NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+      // codegen relevant to the structure of the translation (how extractors are combined)
+      trait AbsCodegen {
+        def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Symbol => Tree]): Tree
+
+        // local / context-free
+
+        /* cast b to tp */
+        def _asInstanceOf(b: Symbol, tp: Type): Tree
+        /* a check `checker` == binder */
+        def _equals(checker: Tree, binder: Symbol): Tree
+        /* b.isInstanceOf[tp] */
+        def _isInstanceOf(b: Symbol, tp: Type): Tree
+        /* tgt is expected to be a Seq, call tgt.drop(n) */
+        def drop(tgt: Tree)(n: Int): Tree
+        /* tgt is expected to have method apply(int), call tgt.apply(i) */
+        def index(tgt: Tree)(i: Int): Tree
+        /* make tree that accesses the i'th component of the tuple referenced by binder */
+        def tupleSel(binder: Symbol)(i: Int): Tree
+      }
+
+      // structure
+      trait Casegen extends AbsCodegen {
+        def one(res: Tree): Tree
+
+        def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+        def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+        def flatMapGuard(cond: Tree, next: Tree): Tree
+        def ifThenElseZero(c: Tree, thenp: Tree): Tree =
+          If(c, thenp, zero)
+        protected def zero: Tree
+      }
+
+      def codegen: AbsCodegen
+
+      abstract class CommonCodegen extends AbsCodegen {
+        def tupleSel(binder: Symbol)(i: Int): Tree = ref(binder).select(nme.productAccessorName(i))
+        def index(tgt: Tree)(i: Int): Tree = {
+          if (i > 0) tgt.select(defn.Seq_apply).appliedTo(Literal(Constant(i)))
+          else tgt.select(defn.Seq_head).ensureApplied
+        }
+
+        // Right now this blindly calls drop on the result of the unapplySeq
+        // unless it verifiably has no drop method (this is the case in particular
+        // with Array.) You should not actually have to write a method called drop
+        // for name-based matching, but this was an expedient route for the basics.
+        def drop(tgt: Tree)(n: Int): Tree = {
+          def callDirect = tgt.select(nme.drop).appliedTo(Literal(Constant(n)))
+          def callRuntime = ref(defn.ScalaRuntime_drop).appliedTo(tgt, Literal(Constant(n)))
+
+          def needsRuntime = !(tgt.tpe derivesFrom defn.SeqClass) /*typeOfMemberNamedDrop(tgt.tpe) == NoType*/
+
+          if (needsRuntime) callRuntime else callDirect
+        }
+
+        // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+        def _equals(checker: Tree, binder: Symbol): Tree =
+          tpd.applyOverloaded(checker, nme.EQ, List(ref(binder)), List.empty, defn.BooleanType)
+
+        // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
+        def _asInstanceOf(b: Symbol, tp: Type): Tree = ref(b).ensureConforms(tp) // andType here breaks t1048
+        def _isInstanceOf(b: Symbol, tp: Type): Tree = ref(b).select(defn.Any_isInstanceOf).appliedToType(tp)
+      }
+    }
+
+    /** Factory methods for [[Rebindings]]; an empty binding list collapses to `NoRebindings`. */
+    object Rebindings {
+      def apply(from: Symbol, to: Symbol) =
+        new Rebindings(from :: Nil, ref(to) :: Nil)
+      // requires sameLength(from, to)
+      def apply(from: List[Symbol], to: List[Tree]) =
+        if (from.isEmpty) NoRebindings
+        else new Rebindings(from, to)
+    }
+
+    /** A parallel list of pattern binders (`lhs`) and the trees that compute
+     *  their values (`rhs`). Used to emit val definitions for sub-pattern
+     *  binders when building the translated match.
+     */
+    class Rebindings(val lhs: List[Symbol], val rhs: List[Tree]) {
+      /** Concatenate two rebinding sets; each symbol may be bound at most once. */
+      def >>(other: Rebindings) =
+        if (other eq NoRebindings) this
+        else if (this eq NoRebindings) other
+        else {
+          val combined = lhs.toSet ++ other.lhs.toSet
+          assert(combined.size == lhs.length + other.lhs.length, "no double assignments")
+          new Rebindings(lhs ++ other.lhs, rhs ++ other.rhs)
+        }
+
+      /** Turn every binding into a `ValDef`, adapting each rhs to its binder's type. */
+      def emitValDefs: List[ValDef] =
+        Collections.map2(lhs, rhs)((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info)))
+    }
+    /** The empty set of rebindings. */
+    object NoRebindings extends Rebindings(Nil, Nil)
+
+    /** Code generator that inlines the Option-based match monad:
+     *  each case becomes a synthetic label method, and "fall through to the
+     *  next case" is a call to the next label, avoiding Option allocations.
+     */
+    trait OptimizedCodegen extends CodegenCore {
+      override def codegen: AbsCodegen = optimizedCodegen
+
+      // when we know we're targeting Option, do some inlining the optimizer won't do
+      // for example, `o.flatMap(f)` becomes `if (o == None) None else f(o.get)`, similarly for orElse and guard
+      // this is a special instance of the advanced inlining optimization that takes a method call on
+      // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+      object optimizedCodegen extends CommonCodegen {
+
+        /** Inline runOrElse and get rid of Option allocations
+         *
+         * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
+         * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
+         * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
+         */
+        def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Symbol => Tree]): Tree = {
+          //val matchRes = ctx.newSymbol(NoSymbol, ctx.freshName("matchRes").toTermName, Flags.Synthetic | Flags.Param | Flags.Label | Flags.Method, restpe /*withoutAnnotations*/)
+          //NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
+
+
+          // one synthetic label per case; each label is owned by the previous one
+          val caseSyms: List[TermSymbol] = cases.scanLeft(ctx.owner.asTerm)((curOwner, nextTree) => newSynthCaseLabel(ctx.freshName("case"), MethodType(Nil, restpe), curOwner)).tail
+
+          // must compute catchAll after caseLabels (side-effects nextCase)
+          // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+          // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+          val catchAllDef = matchFailGen.map { _(scrutSym) }
+            .getOrElse(Throw(New(defn.MatchErrorType, List(ref(scrutSym)))))
+
+          val matchFail = newSynthCaseLabel(ctx.freshName("matchFail"), MethodType(Nil, restpe))
+          val catchAllDefBody = DefDef(matchFail, catchAllDef)
+
+          val nextCases = (caseSyms.tail ::: List(matchFail)).map(ref(_).ensureApplied)
+          val caseDefs = (cases zip caseSyms zip nextCases).foldRight[Tree](catchAllDefBody) {
+            // dotty deviation
+            //case (((mkCase, sym), nextCase), acc) =>
+            (x: (((Casegen => Tree), TermSymbol), Tree), acc: Tree) => x match {
+              case ((mkCase, sym), nextCase) =>
+                val body = mkCase(new OptimizedCasegen(nextCase)).ensureConforms(restpe)
+
+                DefDef(sym, _ => Block(List(acc), body))
+            }
+          }
+
+          // scrutSym == NoSymbol when generating an alternatives matcher
+          // val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives
+
+          Block(List(caseDefs), ref(caseSyms.head).ensureApplied)
+        }
+
+        class OptimizedCasegen(nextCase: Tree) extends CommonCodegen with Casegen {
+          def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Symbol => Tree]): Tree =
+            optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+          // only used to wrap the RHS of a body
+          // res: T
+          // returns MatchMonad[T]
+          def one(res: Tree): Tree = /*ref(matchEnd) appliedTo*/ res // a jump to a case label is special-cased in typedApply
+          protected def zero: Tree = nextCase
+
+          // prev: MatchMonad[T]
+          // b: T
+          // next: MatchMonad[U]
+          // returns MatchMonad[U]
+          def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+
+            val getTp = extractorMemberType(prev.tpe, nme.get)
+            val isDefined = extractorMemberType(prev.tpe, nme.isDefined)
+
+            if ((isDefined isRef defn.BooleanClass) && getTp.exists) {
+              // isDefined and get may be overloaded
+              val getDenot = prev.tpe.member(nme.get).suchThat(_.info.isParameterless)
+              val isDefinedDenot = prev.tpe.member(nme.isDefined).suchThat(_.info.isParameterless)
+
+              val tmpSym = freshSym(prev.pos, prev.tpe, "o")
+              val prevValue = ref(tmpSym).select(getDenot.symbol).ensureApplied
+
+              Block(
+                List(ValDef(tmpSym, prev)),
+                // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+                ifThenElseZero(
+                  ref(tmpSym).select(isDefinedDenot.symbol),
+                  Block(List(ValDef(b.asTerm, prevValue)), next)
+                )
+              )
+            } else {
+              assert(defn.isProductSubType(prev.tpe))
+              val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+              ifThenElseZero(
+                nullCheck,
+                Block(
+                  List(ValDef(b.asTerm, prev)),
+                  next //Substitution(b, ref(prevSym))(next)
+                )
+              )
+            }
+          }
+
+          // cond: Boolean
+          // res: T
+          // nextBinder: T
+          // next == MatchMonad[U]
+          // returns MatchMonad[U]
+          def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+            val rest = Block(List(ValDef(nextBinder.asTerm, res)), next)
+            ifThenElseZero(cond, rest)
+          }
+
+          // guardTree: Boolean
+          // next: MatchMonad[T]
+          // returns MatchMonad[T]
+          def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+            ifThenElseZero(guardTree, next)
+
+          def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+            ifThenElseZero(cond, Block(
+              List(Assign(ref(condSym), Literal(Constant(true))),
+                Assign(ref(nextBinder), res)),
+              next
+            ))
+        }
+      }
+    }
+  /** Per-match flags that disable exhaustivity and/or unreachability analysis. */
+  final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
+  object Suppression {
+    /** Suppress nothing: both analyses run. */
+    val NoSuppression = Suppression(exhaustive = false, unreachable = false)
+  }
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // the making of the trees
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakers extends CodegenCore {
+    /** Hook: optimize the tree makers of each case before code generation. */
+    def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree])
+    /** Hook: static analysis of the cases; the default does nothing. */
+    def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {}
+
+    /** Try to compile the match into an integer `Match` (a JVM switch).
+     *
+     *  Succeeds only when the scrutinee has a switchable type (Int, Byte,
+     *  Short or Char) and every case is a constant in the Int range, an
+     *  alternative of such constants, or a default — with no overlapping
+     *  values. Returns `None` when no switch can be emitted; a warning is
+     *  issued if the scrutinee was annotated with `@switch`.
+     */
+    def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Symbol => Tree], unchecked: Boolean): Option[Tree] = {
+      // TODO Deal with guards?
+
+      def isSwitchableType(tpe: Type): Boolean =
+        (tpe isRef defn.IntClass) ||
+        (tpe isRef defn.ByteClass) ||
+        (tpe isRef defn.ShortClass) ||
+        (tpe isRef defn.CharClass)
+
+      // Extracts the constant Int value from an equality test on the scrutinee.
+      object IntEqualityTestTreeMaker {
+        def unapply(treeMaker: EqualityTestTreeMaker): Option[Int] = treeMaker match {
+          case EqualityTestTreeMaker(`scrutSym`, _, Literal(const), _) =>
+            if (const.isIntRange) Some(const.intValue)
+            else None
+          case _ =>
+            None
+        }
+      }
+
+      def isSwitchCase(treeMakers: List[TreeMaker]): Boolean = treeMakers match {
+        // case 5 =>
+        case List(IntEqualityTestTreeMaker(_), _: BodyTreeMaker) =>
+          true
+
+        // case 5 | 6 =>
+        case List(AlternativesTreeMaker(`scrutSym`, alts, _), _: BodyTreeMaker) =>
+          alts.forall {
+            case List(IntEqualityTestTreeMaker(_)) => true
+            case _ => false
+          }
+
+        // case _ =>
+        case List(_: BodyTreeMaker) =>
+          true
+
+        /* case x @ pat =>
+         * This includes:
+         * case x =>
+         * case x @ 5 =>
+         * case x @ (5 | 6) =>
+         */
+        case (_: SubstOnlyTreeMaker) :: rest =>
+          isSwitchCase(rest)
+
+        case _ =>
+          false
+      }
+
+      /* (Nil, body) means that `body` is the default case
+       * It's a bit hacky but it simplifies manipulations.
+       */
+      def extractSwitchCase(treeMakers: List[TreeMaker]): (List[Int], BodyTreeMaker) = treeMakers match {
+        // case 5 =>
+        case List(IntEqualityTestTreeMaker(intValue), body: BodyTreeMaker) =>
+          (List(intValue), body)
+
+        // case 5 | 6 =>
+        case List(AlternativesTreeMaker(_, alts, _), body: BodyTreeMaker) =>
+          val intValues = alts.map {
+            case List(IntEqualityTestTreeMaker(intValue)) => intValue
+          }
+          (intValues, body)
+
+        // case _ =>
+        case List(body: BodyTreeMaker) =>
+          (Nil, body)
+
+        // case x @ pat =>
+        case (_: SubstOnlyTreeMaker) :: rest =>
+          /* Rebindings have been propagated, so the eventual body in `rest`
+           * contains all the necessary information. The substitution can be
+           * dropped at this point.
+           */
+          extractSwitchCase(rest)
+      }
+
+      def doOverlap(a: List[Int], b: List[Int]): Boolean =
+        a.exists(b.contains _)
+
+      def makeSwitch(valuesToCases: List[(List[Int], BodyTreeMaker)]): Tree = {
+        def genBody(body: BodyTreeMaker): Tree = {
+          val valDefs = body.rebindings.emitValDefs
+          if (valDefs.isEmpty) body.body
+          else Block(valDefs, body.body)
+        }
+
+        // Byte/Short/Char scrutinees are widened to Int for the switch.
+        val intScrut =
+          if (pt isRef defn.IntClass) ref(scrutSym)
+          else Select(ref(scrutSym), nme.toInt)
+
+        val (normalCases, defaultCaseAndRest) = valuesToCases.span(_._1.nonEmpty)
+
+        val newCases = for {
+          (values, body) <- normalCases
+        } yield {
+          val literals = values.map(v => Literal(Constant(v)))
+          val pat =
+            if (literals.size == 1) literals.head
+            else Alternative(literals)
+          CaseDef(pat, EmptyTree, genBody(body))
+        }
+
+        val catchAllDef = {
+          if (defaultCaseAndRest.isEmpty) {
+            matchFailGenOverride.fold[Tree](
+              Throw(New(defn.MatchErrorType, List(ref(scrutSym)))))(
+              _(scrutSym))
+          } else {
+            /* After the default case, assuming the IR even allows anything,
+             * things are unreachable anyway and can be removed.
+             */
+            genBody(defaultCaseAndRest.head._2)
+          }
+        }
+        val defaultCase = CaseDef(Underscore(defn.IntType), EmptyTree, catchAllDef)
+
+        Match(intScrut, newCases :+ defaultCase)
+      }
+
+      val dealiased = scrut.tpe.widenDealias
+      if (isSwitchableType(dealiased) && cases.forall(isSwitchCase)) {
+        val valuesToCases = cases.map(extractSwitchCase)
+        val values = valuesToCases.map(_._1)
+        if (values.tails.exists { tail => tail.nonEmpty && tail.tail.exists(doOverlap(_, tail.head)) }) {
+          // TODO Deal with overlapping cases (mostly useless without guards)
+          None
+        } else {
+          Some(makeSwitch(valuesToCases))
+        }
+      } else {
+        if (dealiased hasAnnotation defn.SwitchAnnot)
+          ctx.warning("failed to emit switch for `@switch` annotated match", scrut.pos)
+        None
+      }
+    }
+
+    // for catch (no need to customize match failure)
+    /** Type-based switch for catch handlers; not implemented yet. */
+    def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+      None // todo
+
+    /** One step in the translation of a single case: each tree maker wraps
+     *  the remainder of its case (`next`) in the test or binding it stands for.
+     */
+    abstract class TreeMaker {
+      def pos: Position
+
+      // Set at most once by incorporateOuterRebinding; null means "not yet merged".
+      private[this] var currSub: Rebindings = null
+
+      /** captures the scope and the value of the bindings in patterns
+       * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed)
+       */
+      def rebindings: Rebindings =
+        if (currSub eq null) introducedRebindings
+        else currSub
+
+      protected def introducedRebindings: Rebindings
+
+      private[TreeMakers] def incorporateOuterRebinding(outerSubst: Rebindings): Unit = {
+        if (currSub ne null) {
+          ctx.debuglog("BUG: incorporateOuterRebinding called more than once for " + ((this, currSub, outerSubst)))
+          if (ctx.debug) Thread.dumpStack()
+        }
+        else currSub = outerSubst >> rebindings
+      }
+
+      /** The substitution that specifies the trees that compute the values of the subpattern binders.
+       *
+       * Should not be used to perform actual substitution!
+       * Only used to reason symbolically about the values the subpattern binders are bound to.
+       * See TreeMakerToCond#updateSubstitution.
+       *
+       * Overridden in PreserveSubPatBinders to pretend it replaces the subpattern binders by subpattern refs
+       * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+       *
+       * TODO: clean this up, would be nicer to have some higher-level way to compute
+       * the binders bound by this tree maker and the symbolic values that correspond to them
+       */
+      def subPatternsAsRebindings: Rebindings = rebindings
+
+      // build Tree that chains `next` after the current extractor
+      def chainBefore(next: Tree)(casegen: Casegen): Tree
+    }
+
+    /** A tree maker that introduces no new pattern binders. */
+    sealed trait NoNewBinders extends TreeMaker {
+      protected val introducedRebindings: Rebindings = NoRebindings
+    }
+
+    /** Emits `tree` unconditionally, ignoring whatever would follow it. */
+    case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+      def pos = tree.pos
+
+      def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
+    }
+
+    /** Emits the user-written body of a case (wrapped via `casegen.one`);
+     *  always the final tree maker of a case.
+     */
+    case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+      def pos = body.pos
+
+      def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+        /*atPos(body.pos)*/(casegen.one(body)) // since SubstOnly treemakers are dropped, need to do it here
+      override def toString = "B" + ((body, matchPt))
+    }
+
+    /**
+     * In scalac, for a block such as
+     * x match {
+     * case d => <body>
+     * }
+     *
+     * d inside <body> was to be substituted by x.
+     *
+     * In dotty, SubstOnlyTreeMakers instead generate a normal ValDef,
+     * and do not create a new substitution.
+     *
+     * This was done for several reasons:
+     * 1) it is a lot easier to Y-check,
+     * as d's type could be used in <body>.
+     * 2) it simplifies debugging of the generated code, as
+     * this works also for nested patterns, and previously they used unreadable names
+     * 3) It showed better (~30%) performance;
+     * rebuilding the tree and propagating types was taking substantial time.
+     */
+    case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
+      val pos = Positions.NoPosition
+
+      val introducedRebindings = Rebindings(prevBinder, nextBinder)
+      def chainBefore(next: Tree)(casegen: Casegen): Tree = next
+      //override def toString = "S" + localSubstitution
+    }
+
+    /** A tree maker whose position is that of the binder it introduces. */
+    sealed abstract class FunTreeMaker extends TreeMaker {
+      val nextBinder: Symbol
+      def pos = nextBinder.pos
+    }
+
+    /** Guards `next` with the boolean `cond`; when the test also refines the
+     *  binder (`prevBinder ne nextBinder`), binds `res` to `nextBinder` on success.
+     */
+    sealed abstract class CondTreeMaker extends FunTreeMaker {
+      val prevBinder: Symbol
+      val nextBinderTp: Type
+      val cond: Tree
+      val res: Tree
+
+      val nextBinder: Symbol
+      lazy val introducedRebindings = /*
+        if (nextBinder ne prevBinder) Rebindings(prevBinder, nextBinder)
+        else */ NoRebindings
+
+      def chainBefore(next: Tree)(casegen: Casegen): Tree =
+        if (prevBinder ne nextBinder) // happens when typeTest is known to succeed
+          /*atPos(pos)(*/casegen.flatMapCond(cond, res, nextBinder, next)//)
+        else casegen.flatMapGuard(cond, next)
+    }
+
+    // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
+    protected val debugInfoEmitVars = true //!settings.optimise.value
+
+    /**
+     * Tree maker that captures sub pattern values during pattern match.
+     */
+    sealed trait PreserveSubPatBinders extends TreeMaker {
+      val subPatBinders: List[Symbol] // captured values
+      val subPatRefs: List[Tree] // trees that will replace references to subPatBinders
+      val ignoredSubPatBinders: Set[Symbol] // ignored as they aren't used in body of pattern
+
+      // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
+      // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
+      // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+      // dirty debuggers will have to get dirty to see the wildcards
+      lazy val storedBinders: Set[Symbol] =
+        (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+      // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+      def extraStoredBinders: Set[Symbol]
+
+      def emitVars = storedBinders.nonEmpty
+
+      // partitions (binder, ref) pairs into those stored as vals vs. merely substituted
+      lazy val storedSubsted = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
+
+      def stored = storedSubsted._1
+
+      def substed = storedSubsted._2
+
+      // dd: this didn't yet trigger error. But I believe it would. if this causes double definition of symbol error this can be replaced with NoRebindings
+      protected lazy val introducedRebindings: Rebindings = if (!emitVars) Rebindings(subPatBinders, subPatRefs)
+      else {
+        val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
+        Rebindings(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
+      }
+
+      /** The substitution that specifies the trees that compute the values of the subpattern binders.
+       *
+       * We pretend to replace the subpattern binders by subpattern refs
+       * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+       */
+      override def subPatternsAsRebindings =
+        Rebindings(subPatBinders, subPatRefs) >> super.subPatternsAsRebindings
+
+      def bindSubPats(in: Tree): Tree =
+        if (!emitVars) in
+        else {
+          // binders in `subPatBindersStored` that are referenced by tree `in`
+          val usedBinders = new collection.mutable.HashSet[Symbol]()
+          // all potentially stored subpat binders
+          val potentiallyStoredBinders = stored.unzip._1.toSet
+          // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+          new DeepFolder[Unit]((x: Unit, t: Tree) =>
+            if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol).apply((), in)
+
+          if (usedBinders.isEmpty) in
+          else {
+            // only store binders actually used
+            val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+
+            Block(Collections.map2(subPatBindersStored.toList, subPatRefsStored.toList)((bind, ref) => {
+              // required in case original pattern had a more precise type
+              // eg case s@"foo" => would be otherwise translated to s with type String instead of String("foo")
+              def refTpeWiden = ref.tpe.widen
+              def bindInfoWiden = bind.info.widen
+              def loc = bind.showFullName
+              if (!(ref.tpe <:< bind.info.widen)) {
+                ctx.debuglog(s"here ${bind.showFullName} expected: ${bindInfoWiden.show} got: ${refTpeWiden.show}")
+              }
+              val refCasted = ref.ensureConforms(bind.info)
+              ValDef(bind.asTerm, refCasted)
+            }), in)
+          }
+        }
+    }
+
+    /**
+     * Make a TreeMaker that will result in an extractor call specified by `extractor`
+     * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
+     * a function with binder `nextBinder` over our extractor's result
+     * the function's body is determined by the next TreeMaker
+     * (furthermore, the interpretation of `flatMap` depends on the codegen instance we're using).
+     *
+     * The values for the subpatterns, as computed by the extractor call in `extractor`,
+     * are stored in local variables that re-use the symbols in `subPatBinders`.
+     * This makes extractor patterns more debuggable (SI-5739).
+     */
+    case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
+      val subPatBinders: List[Symbol],
+      val subPatRefs: List[Tree],
+      extractorReturnsBoolean: Boolean,
+      val checkedLength: Option[Int],
+      val prevBinder: Symbol,
+      val ignoredSubPatBinders: Set[Symbol]
+    ) extends FunTreeMaker with PreserveSubPatBinders {
+
+      def extraStoredBinders: Set[Symbol] = Set()
+
+      ctx.debuglog(s"""
+        |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
+        | $subPatBinders
+        | $subPatRefs
+        | $extractorReturnsBoolean
+        | $checkedLength
+        | $prevBinder
+        | $ignoredSubPatBinders
+        |}""".stripMargin)
+
+      def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+        // wrap `next` in the extra condition (if any) before binding sub patterns
+        val condAndNext = extraCond match {
+          case Some(cond: Tree) =>
+            casegen.ifThenElseZero(cond, bindSubPats(next))
+          case _ =>
+            bindSubPats(next)
+        }
+
+        if (extractorReturnsBoolean) casegen.flatMapCond(extractor, unitLiteral, nextBinder, condAndNext)
+        else casegen.flatMap(extractor, nextBinder, condAndNext) // getType?
+      }
+
+      override def toString = "X" + ((extractor, nextBinder.name))
+    }
+
+    /**
+     * An optimized version of ExtractorTreeMaker for Products.
+     * For now, this is hard-coded to case classes, and we simply extract the case class fields.
+     *
+     * The values for the subpatterns, as specified by the case class fields at the time of extraction,
+     * are stored in local variables that re-use the symbols in `subPatBinders`.
+     * This makes extractor patterns more debuggable (SI-5739) as well as
+     * avoiding mutation after the pattern has been matched (SI-5158, SI-6070)
+     *
+     * TODO: make this user-definable as follows
+     * When a companion object defines a method `def unapply_1(x: T): U_1`, but no `def unapply` or `def unapplySeq`,
+     * the extractor is considered to match any non-null value of type T
+     * the pattern is expected to have as many sub-patterns as there are `def unapply_I(x: T): U_I` methods,
+     * and the type of the I'th sub-pattern is `U_I`.
+     * The same exception for Seq patterns applies: if the last extractor is of type `Seq[U_N]`,
+     * the pattern must have at least N arguments (exactly N if the last argument is annotated with `: _*`).
+     * The arguments starting at N (and beyond) are taken from the sequence returned by apply_N,
+     * and it is checked that the sequence has enough elements to provide values for all expected sub-patterns.
+     *
+     * For a case class C, the implementation is assumed to be `def unapply_I(x: C) = x._I`,
+     * and the extractor call is inlined under that assumption.
+     */
+    case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
+      val subPatBinders: List[Symbol],
+      val subPatRefs: List[Tree],
+      val mutableBinders: List[Symbol],
+      binderKnownNonNull: Boolean,
+      val ignoredSubPatBinders: Set[Symbol]
+    ) extends FunTreeMaker with PreserveSubPatBinders {
+
+      val nextBinder = prevBinder // just passing through
+
+      // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
+      def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
+
+      def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+        val nullCheck: Tree = ref(prevBinder).select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+
+        // prepend a null check unless the binder is statically known to be non-null
+        val cond: Option[Tree] =
+          if (binderKnownNonNull) extraCond
+          else extraCond.map(nullCheck.select(defn.Boolean_&&).appliedTo).orElse(Some(nullCheck))
+
+        cond match {
+          case Some(cond: Tree) =>
+            casegen.ifThenElseZero(cond, bindSubPats(next))
+          case _ =>
+            bindSubPats(next)
+        }
+      }
+
+      override def toString = "P" + ((prevBinder.name, extraCond getOrElse "", introducedRebindings))
+    }
+
+    /** Recognizes extractor tree makers whose extractor always succeeds.
+     *  NOTE: `irrefutableExtractorType` currently always answers `false`
+     *  (the interesting cases are still todo), so this never matches yet.
+     */
+    object IrrefutableExtractorTreeMaker {
+      // will an extractor with unapply method of methodtype `tp` always succeed?
+      // note: this assumes the other side-conditions implied by the extractor are met
+      // (argument of the right type, length check succeeds for unapplySeq,...)
+      def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
+        // case TypeRef(_, SomeClass, _) => true todo
+        // probably not useful since this type won't be inferred nor can it be written down (yet)
+        // case ConstantTrue => true todo
+        case _ => false
+      }
+
+      def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
+        case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
+          Some((extractor, nextBinder))
+        case _ =>
+          None
+      }
+    }
+
+ object TypeTestTreeMaker {
+ // factored out so that we can consistently generate other representations of the tree that implements the test
+ // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
+ trait TypeTestCondStrategy {
+ type Result
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ // TODO: can probably always widen
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result
+ def nonNullTest(testedBinder: Symbol): Result
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result
+ def eqTest(pat: Tree, testedBinder: Symbol): Result
+ def and(a: Result, b: Result): Result
+ def tru: Result
+ }
+
+ object treeCondStrategy extends TypeTestCondStrategy {
+ type Result = Tree
+
+ def and(a: Result, b: Result): Result = a.select(defn.Boolean_&&).appliedTo(b)
+ def tru = Literal(Constant(true))
+ def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
+ def nonNullTest(testedBinder: Symbol) = ref(testedBinder).select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+ def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
+ def eqTest(pat: Tree, testedBinder: Symbol) = ref(testedBinder).select(defn.Object_eq).appliedTo(pat)
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ val expectedOuter = expectedTp.normalizedPrefix match {
+ //case NoType => Literal(Constant(true)) // fallback for SI-6183 todo?
+ case pre: SingletonType => singleton(pre)
+ }
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ // val outer = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedTp.prefix
+
+ val expectedClass = expectedTp.dealias.classSymbol.asClass
+ val test = codegen._asInstanceOf(testedBinder, expectedTp)
+ // TODO: Use nme.OUTER_SELECT, like the Inliner does?
+ val outerAccessorTested = ctx.atPhase(ctx.explicitOuterPhase.next) { implicit ctx =>
+ ExplicitOuter.ensureOuterAccessors(expectedClass)
+ test.select(ExplicitOuter.outerAccessor(expectedClass)).select(defn.Object_eq).appliedTo(expectedOuter)
+ }
+ outerAccessorTested
+ }
+ }
+
+ /*object pureTypeTestChecker extends TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = false
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false
+ def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }*/
+
+ // Analysis strategy (Result = Boolean): answers "does this condition imply that
+ // `binder` is non-null?". Only a type test or an explicit null test on `binder`
+ // itself implies it; `and` is disjunctive because either conjunct sufficing is enough.
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
+ }
+ }
+
+ /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ *
+ * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variable are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+ // Emits the run-time condition that `testedBinder` conforms to `expectedTp`, binding the
+ // (cast) result to `afterTest`. `extractorArgTypeTest` marks the type test inserted before
+ // an extractor call, which skips the equality special-cases below.
+ case class TypeTestTreeMaker(afterTest: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
+ import TypeTestTreeMaker._
+
+ ctx.debuglog("TTTM" + ((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
+
+ val prevBinder = testedBinder
+
+ val nextBinder = afterTest.asTerm
+
+ // True when the pattern's class is an inner class whose outer pointer must be compared
+ // (per ExplicitOuter.needsOuterIfReferenced) and the prefix is concrete.
+ def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol): Boolean = {
+ // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
+ // generates an outer test based on `patType.prefix` which automatically dealiases.
+ patType.dealias match {
+ case tref @ TypeRef(pre, name) =>
+ (pre ne NoPrefix) && tref.symbol.isClass &&
+ ExplicitOuter.needsOuterIfReferenced(tref.symbol.asClass)
+ case _ =>
+ false
+ }
+ }
+
+ override lazy val introducedRebindings = NoRebindings
+
+ // Outer test is only relevant when the expected type's prefix is a term (a value path).
+ def outerTestNeeded = {
+ val np = expectedTp.normalizedPrefix
+ val ts = np.termSymbol
+ (ts ne NoSymbol) && needsOuterTest(expectedTp, testedBinder.info, ctx.owner)
+ }
+
+ // the logic to generate the run-time test that follows from the fact that
+ // a `prevBinder` is expected to have type `expectedTp`
+ // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
+ import cs._
+
+ // propagate expected type
+ def expTp(t: Tree): t.type = t // setType expectedTp todo:
+
+ def testedWide = testedBinder.info.widen
+ def expectedWide = expectedTp.widen
+ def isAnyRef = testedWide <:< defn.AnyRefType
+ def isAsExpected = testedWide <:< expectedTp
+ def isExpectedPrimitiveType = isAsExpected && expectedTp.classSymbol.isPrimitiveValueClass
+ def isExpectedReferenceType = isAsExpected && (expectedTp <:< defn.AnyRefType)
+ def mkNullTest = nonNullTest(testedBinder)
+ def mkOuterTest = outerTest(testedBinder, expectedTp)
+ def mkTypeTest = typeTest(testedBinder, expectedWide)
+
+ def mkEqualsTest(lhs: Tree): cs.Result = equalsTest(lhs, testedBinder)
+ def mkEqTest(lhs: Tree): cs.Result = eqTest(lhs, testedBinder)
+ def addOuterTest(res: cs.Result): cs.Result = if (outerTestNeeded) and(res, mkOuterTest) else res
+
+ // If we conform to expected primitive type:
+ // it cannot be null and cannot have an outer pointer. No further checking.
+ // If we conform to expected reference type:
+ // have to test outer and non-null
+ // If we do not conform to expected type:
+ // have to test type and outer (non-null is implied by successful type test)
+ def mkDefault = (
+ if (isExpectedPrimitiveType) tru
+ else addOuterTest(
+ if (isExpectedReferenceType) mkNullTest
+ else mkTypeTest
+ )
+ )
+
+ // true when called to type-test the argument to an extractor
+ // don't do any fancy equality checking, just test the type
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ if (extractorArgTypeTest) mkDefault
+ else expectedTp match {
+ // matching a module (object): == against the module instance plus a type test
+ case ThisType(tref) if tref.symbol.flags is Flags.Module =>
+ and(mkEqualsTest(ref(tref.symbol.companionModule)), mkTypeTest) // must use == to support e.g. List() == Nil
+ case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(Literal(Constant(null))))
+ case ConstantType(const) => mkEqualsTest(expTp(Literal(const)))
+ case t: SingletonType => mkEqTest(singleton(expectedTp)) // SI-4577, SI-4897
+ //case ThisType(sym) => mkEqTest(expTp(This(sym)))
+ case _ => mkDefault
+ }
+ }
+
+ // The tree-level condition and the cast result bound to `nextBinder`.
+ val cond = renderCondition(treeCondStrategy)
+ val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
+
+ // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
+ //def isPureTypeTest = renderCondition(pureTypeTestChecker)
+
+ // Re-runs renderCondition with the Boolean-valued analysis strategy to decide
+ // whether a successful test implies `binder` is non-null (lets callers skip a null check).
+ def impliesBinderNonNull(binder: Symbol): Boolean =
+ // @odersky: scalac is able to infer in this method that nonNullImpliedByTestChecker.Result,
+ // dotty instead infers type projection TreeMakers.this.TypeTestTreeMaker.TypeTestCondStrategy#Result
+ // which in turn doesn't typecheck in this method. Can you please explain why?
+ // dotty deviation
+ renderCondition(nonNullImpliedByTestChecker(binder)).asInstanceOf[Boolean]
+
+ override def toString = "TT" + ((expectedTp, testedBinder.name, nextBinderTp))
+ }
+
+ // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
+ // Emits an equality test (`patTree == prevBinder`) for literal / stable-identifier patterns;
+ // on success, `prevBinder` is rebound (narrowed to `patTree.tpe & prevBinder.info`).
+ case class EqualityTestTreeMaker(prevBinder: Symbol, subpatBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
+ val nextBinderTp = patTree.tpe & prevBinder.info
+ // Reuse subpatBinder when the pattern bound a name; otherwise mint a fresh symbol.
+ val nextBinder = if (prevBinder eq subpatBinder) freshSym(pos, nextBinderTp) else subpatBinder
+
+ // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
+ // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
+ val cond = codegen._equals(patTree, prevBinder)
+ val res = ref(prevBinder).ensureConforms(nextBinderTp)
+ override def toString = "ET" + ((prevBinder.name, patTree))
+ }
+
+ // Translates an alternative pattern `p1 | ... | pn`: each alternative's tree makers are
+ // combined into a Boolean-producing matcher; the overall condition is their disjunction.
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
+ // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
+
+ // Rebindings from outer makers must also be pushed into each alternative's makers.
+ override private[TreeMakers] def incorporateOuterRebinding(outerSubst: Rebindings): Unit = {
+ super.incorporateOuterRebinding(outerSubst)
+ altss = altss map (alts => propagateRebindings(alts, rebindings))
+ }
+
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = {
+ /*atPos(pos)*/{
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(Literal(Constant(true)))))(casegen))
+ )
+
+ // matcher over Boolean: true when some alternative matched, false otherwise.
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, defn.BooleanType)(combinedAlts, Some((x: Symbol) => Literal(Constant(false))))
+ codegenAlt.ifThenElseZero(findAltMatcher, next)
+ }
+ }
+ }
+
+ // Wraps a case guard (`if cond`): the continuation runs only when the guard holds.
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ val pos = guardTree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(guardTree, next)
+ override def toString = "G(" + guardTree + ")"
+ }
+
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ // Folds the makers right-to-left into one tree: guard/body makers first (inside), then the
+ // test makers wrapped around them, with the final guard/body maker's rebindings emitted as val defs.
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree = {
+ // split at the first NoNewBinders maker (guard or body); everything before it is a test.
+ val (testsMakers, guardAndBodyMakers) = treeMakers.span(t => !(t.isInstanceOf[NoNewBinders]))
+ val body = guardAndBodyMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
+ // NOTE(review): `.last` assumes guardAndBodyMakers is non-empty — presumably every case
+ // ends in a BodyTreeMaker; confirm against translateCase.
+ val rebindings = guardAndBodyMakers.last.rebindings.emitValDefs
+ testsMakers.foldRight(Block(rebindings, body): Tree)((a, b) => a.chainBefore(b)(casegen))
+ }
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // unlike in scalac it does not drop SubstOnly tree makers,
+ // as there could be types having them as prefix
+ // Mutates each maker in place (incorporateOuterRebinding) with the rebindings accumulated
+ // from the makers to its left, then returns the same list.
+ def propagateRebindings(treeMakers: List[TreeMaker], initial: Rebindings): List[TreeMaker] = {
+ var accumSubst: Rebindings = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterRebinding accumSubst
+ accumSubst = maker.rebindings
+ }
+ treeMakers
+ }
+
+ // calls propagateSubstitution on the treemakers
+ // Top-level driver: propagates rebindings through every case, tries switch emission,
+ // otherwise runs analysis/optimization and emits the chained-matcher translation.
+ // `matchFailGenOverride`, when given, replaces the default MatchError-throwing failure case.
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Symbol => Tree]): Tree = {
+ // unlike in scalac SubstOnlyTreeMakers are maintained.
+ val casesRebindingPropagated = casesRaw map (propagateRebindings(_, NoRebindings))
+
+ // default failure: throw new MatchError(scrutinee)
+ def matchFailGen = matchFailGenOverride orElse Some((arg: Symbol) => Throw(New(defn.MatchErrorType, List(ref(arg)))))
+
+ ctx.debuglog("combining cases: " + (casesRebindingPropagated.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+
+ // Patmat analysis (exhaustivity/reachability suppression) is not ported yet; see the
+ // commented-out scalac logic below.
+ val (suppression, requireSwitch): (Suppression, Boolean) =
+ /*if (settings.XnoPatmatAnalysis)*/ (Suppression.NoSuppression, false)
+ /*else scrut match {
+ case Typed(tree, tpt) =>
+ val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
+ val supressUnreachable = tree match {
+ case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
+ case _ => false
+ }
+ val suppression = Suppression(suppressExhaustive, supressUnreachable)
+ // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
+ val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
+ (suppression, requireSwitch)
+ case _ =>
+ (Suppression.NoSuppression, false)
+ }*/
+
+ emitSwitch(scrut, scrutSym, casesRebindingPropagated, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
+ if (requireSwitch) ctx.warning("could not emit switch for @switch annotated match", scrut.pos)
+
+ if (casesRebindingPropagated nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ // NOTE(review): the `.nonEmpty` below is redundant — already guarded by the enclosing if.
+ val synthCatchAll: Option[Symbol => Tree] =
+ if (casesRebindingPropagated.nonEmpty && {
+ val nonTrivLast = casesRebindingPropagated.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
+
+ analyzeCases(scrutSym, casesRebindingPropagated, pt, suppression)
+
+ val (cases, toHoist) = optimizeCases(scrutSym, casesRebindingPropagated, pt)
+
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases.map(x => combineExtractors(x) _), synthCatchAll)
+
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ // no cases at all: matcher that always fails
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
+ }
+ }
+
+ // Placeholder optimizer: CSE/DCE and switch emission from scalac are not ported yet,
+ // so cases pass through unchanged and nothing is hoisted.
+ trait MatchOptimizer extends OptimizedCodegen with TreeMakers
+ /*with SwitchEmission // todo: toBe ported
+ with CommonSubconditionElimination*/ {
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
+ // TODO: do CSE on result of doDCE(prevBinder, cases, pt)
+ val optCases = cases// todo: doCSE(prevBinder, cases, pt)
+ val toHoist = Nil/*(
+ for (treeMakers <- optCases)
+ yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
+ ).flatten.flatten.toList*/
+ (optCases, toHoist)
+ }
+ }
+
+ trait MatchTranslator extends TreeMakers with ScalacPatternExpanders {
+
+ def isBackquoted(x: Ident) = x.isInstanceOf[BackquotedIdent]
+
+ // A variable pattern is a plain (non-backquoted) identifier with a variable-shaped name
+ // (per Name.isVariableName, e.g. lower-case initial).
+ def isVarPattern(pat: Tree): Boolean = pat match {
+ case x: BackquotedIdent => false
+ case x: Ident => x.name.isVariableName
+ case _ => false
+ }
+
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ // `x: _*` ascriptions carry no test
+ case Typed(_, arg) if arg.tpe.isRepeatedParam => true
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case t if (tpd.isWildcardArg(t)) => true
+ case x: Ident => isVarPattern(x)
+ case Alternative(ps) => ps forall unapply
+ case EmptyTree => true
+ case _ => false
+ }
+ }
+
+ // Recognizes patterns whose binder is `_` — their extracted values need not be stored.
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall unapply
+ case Typed(PatternBoundToUnderscore(), _) => false // true // Dmitry: change in dotty. Type test will be performed and the field must be stored
+ case _ => false
+ }
+ }
+
+ // Matches `Bind(name, expr)` nodes that carry a real symbol, yielding (symbol, subpattern).
+ object SymbolBound {
+ def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+ case Bind(_, expr) if tree.symbol.exists => Some(tree.symbol -> expr)
+ case _ => None
+ }
+ }
+
+ // Pairs a pattern tree with its binder: reuse the Bind's own symbol where possible,
+ // otherwise mint a fresh one at the expected type `pt` ("pi" for bound-typed patterns).
+ def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match {
+ case SymbolBound(sym, Typed(subpat, tpe)) => BoundTree(freshSym(tree.pos, pt, prefix = "pi"), tree)
+ case SymbolBound(sym, expr) => BoundTree(sym, expr)
+ case _ => BoundTree(freshSym(tree.pos, pt, prefix = "p"), tree)
+ }
+
+ // The unit of translation: a pattern `tree` together with the symbol `binder` that holds
+ // the value being matched against it. `translate()` turns it into a list of TreeMakers,
+ // recursing into subpatterns via TranslationStep.
+ final case class BoundTree(binder: Symbol, tree: Tree) {
+ private lazy val extractor = ExtractorCall(tree, binder)
+
+ def pos = tree.pos
+ def tpe = binder.info.widenDealias
+ // expected type for the pattern: type ascriptions win over the binder's type
+ def pt = unbound match {
+ // case Star(tpt) => this glbWith seqType(tpt.tpe) dd todo:
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
+
+ def glbWith(other: Type) = ctx.typeComparer.glb(tpe :: other :: Nil)// .normalize
+
+ // `x @ (_: T)` or bare `_: T` — yields (binder-to-use, ascribed type)
+ object SymbolAndTypeBound {
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ case SymbolBound(sym, Typed(_: UnApply, _)) => None // see comment in #189
+ case SymbolBound(sym, TypeBound(tpe)) => Some(sym -> tpe)
+ case TypeBound(tpe) => Some(binder -> tpe)
+ case _ => None
+ }
+ }
+
+ // `x @ <constant-or-stable-id>`
+ object SymbolAndValueBound {
+ def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+ case SymbolBound(sym, ConstantPattern(const)) => Some(sym -> const)
+ case _ => None
+ }
+ }
+
+ // type ascription that is not a repeated-param (`_*`) marker
+ object TypeBound {
+ def unapply(tree: Tree): Option[Type] = tree match {
+ case Typed(_, arg) if !arg.tpe.isRepeatedParam => Some(tree.typeOpt)
+ case _ => None
+ }
+ }
+
+ // literal / stable identifier / selection / `this` — compared with ==
+ object ConstantPattern {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => Some(tree)
+ case _ => None
+ }
+ }
+
+ private def rebindTo(pattern: Tree) = BoundTree(binder, pattern)
+ private def step(treeMakers: TreeMaker*)(subpatterns: BoundTree*): TranslationStep = TranslationStep(treeMakers.toList, subpatterns.toList)
+
+ // One small step per pattern shape; each produces the makers plus pending subpatterns.
+ private def bindingStep(sub: Symbol, subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern))
+ private def equalityTestStep(testedSymbol: Symbol, constantSymbol: Symbol, constant: Tree)
+ = step(EqualityTestTreeMaker(testedSymbol, constantSymbol, constant, pos))()
+ private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, sub.termRef)(pos))()
+ private def alternativesStep(alts: List[Tree]) = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))()
+ private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate())
+ private def noStep() = step()()
+
+ private def unsupportedPatternMsg =
+ i"unsupported pattern: ${tree.show} / $this (this is a scalac bug.)"
+
+ // example check: List[Int] <:< ::[Int]
+ // Builds the makers for an extractor/constructor pattern: a type test (when the scrutinee
+ // does not statically conform to the unapply's parameter type) followed by the extraction.
+ private def extractorStep(): TranslationStep = {
+ def paramType = extractor.aligner.wholeType
+ import extractor.treeMaker
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ lazy val typeTest = TypeTestTreeMaker(freshSym(pos, paramType), binder, paramType, paramType)(pos, extractorArgTypeTest = true)
+ // check whether typetest implies binder is not null,
+ // even though the eventual null check will be on typeTest.nextBinder
+ // it'll be equal to binder casted to paramType anyway (and the type test is on binder)
+ def extraction: TreeMaker = treeMaker(typeTest.nextBinder, typeTest.impliesBinderNonNull(binder), pos, paramType)
+
+ // paramType = the type expected by the unapply
+ // TODO: paramType may contain unbound type params (run/t2800, run/t3530)
+ val makers = (
+ // Statically conforms to paramType
+ if (tpe <:< paramType) treeMaker(binder, false, pos, tpe) :: Nil
+ else typeTest :: extraction :: Nil
+ )
+ step(makers: _*)(extractor.subBoundTrees: _*)
+ }
+
+ // Summary of translation cases. I moved the excerpts from the specification further below so all
+ // the logic can be seen at once.
+ //
+ // [1] skip wildcard trees -- no point in checking them
+ // [2] extractor and constructor patterns
+ // [3] replace subpatBinder by patBinder, as if the Bind was not there.
+ // It must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type,
+ // this is not guaranteed until we cast
+ // [4] typed patterns - a typed pattern never has any subtrees
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ // [5] literal and stable id patterns
+ // [6] pattern alternatives
+ // [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
+ // don't fail here though (or should we?)
+ def nextStep(): TranslationStep = tree match {
+ case _: UnApply | _: Apply | Typed(_: UnApply | _: Apply, _) => extractorStep()
+ case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
+ case TypeBound(tpe) => typeTestStep(binder, tpe)
+ case SymbolBound(sym, expr) => bindingStep(sym, expr)
+ case WildcardPattern() => noStep()
+ case ConstantPattern(const) => equalityTestStep(binder, binder, const)
+ case Alternative(alts) => alternativesStep(alts)
+ case _ => ctx.error(unsupportedPatternMsg, pos) ; noStep()
+ }
+ // depth-first: this node's makers followed by each subpattern's translation
+ def translate(): List[TreeMaker] = nextStep() merge (_.translate())
+
+ private def concreteType = tpe.bounds.hi
+ private def unbound = unbind(tree)
+ private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
+ private def at_s = unbound match {
+ case WildcardPattern() => ""
+ case pat => s" @ $pat"
+ }
+ override def toString = s"${binder.name}: $tpe_s$at_s"
+ }
+
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ final case class TranslationStep(makers: List[TreeMaker], subpatterns: List[BoundTree]) {
+ // this step's makers, then whatever `f` produces for each pending subpattern
+ def merge(f: BoundTree => List[TreeMaker]): List[TreeMaker] = makers ::: (subpatterns flatMap f)
+ override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")")
+ }
+
+ // Detects the compiler-generated `case _ =>` default (bound to nme.DEFAULT_CASE, no guard).
+ def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
+ case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
+ case _ => false
+ }
+
+ /** Implement a pattern match by turning its cases (including the implicit failure case)
+ * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
+ *
+ * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
+ * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
+ *
+ * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
+ * thus, you must typecheck the result (and that will in turn translate nested matches)
+ * this could probably be optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
+ */
+ def translateMatch(match_ : Match): Tree = {
+ val Match(sel, cases) = match_
+
+ val selectorTp = sel.tpe.widen.deAnonymize/*withoutAnnotations*/
+
+ // fresh symbol holding the evaluated scrutinee (bound in the result Block below)
+ val selectorSym = freshSym(sel.pos, selectorTp, "selector")
+
+ // peel off a trailing synthetic default case and turn its body into the failure override
+ val (nonSyntheticCases, defaultOverride) = cases match {
+ case init :+ last if isSyntheticDefaultCase(last) => (init, Some(((scrut: Symbol) => last.body)))
+ case _ => (cases, None)
+ }
+
+
+ // checkMatchVariablePatterns(nonSyntheticCases) // only used for warnings
+
+ // we don't transform after uncurry
+ // (that would require more sophistication when generating trees,
+ // and the only place that emits Matches after typers is for exception handling anyway)
+ /*if (phase.id >= currentRun.uncurryPhase.id)
+ devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases")*/
+
+ ctx.debuglog("translating " + cases.mkString("{", "\n", "}"))
+
+ //val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
+
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ ///val origPt = removeCPSFromPt(match_.tpe)
+ // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
+ // pt is the skolemized version
+ val pt = match_.tpe.widen //repeatedToSeq(origPt)
+
+ // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
+ selectorSym.setFlag(Flags.SyntheticCase)
+
+ // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
+ val combined = combineCases(sel, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, ctx.owner, defaultOverride)
+
+ // if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
+ // { val selector = <sel>; <combined matcher> }
+ Block(List(ValDef(selectorSym, sel)), combined)
+ }
+
+ /** The translation of `pat if guard => body` has two aspects:
+ * 1) the substitution due to the variables bound by patterns
+ * 2) the combination of the extractor calls using `flatMap`.
+ *
+ * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
+ * this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
+ * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
+ * 1) is tricky because translatePattern_i determines the shape of translatePattern_i + 1:
+ * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
+ * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
+ *
+ * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
+ * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
+ * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
+ * in the scope of the remainder of the pattern, and they must thus be replaced by:
+ * - (for 1-ary unapply) x_1
+ * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
+ * - (for unapplySeq) x_1.apply(i)
+ *
+ * in the treemakers,
+ *
+ * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
+ *
+ * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
+ * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
+ * a function that will take care of binding and substitution of the next ast (to the right).
+ *
+ */
+ // One case = pattern makers ++ optional guard maker :+ body maker.
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef): List[TreeMaker] = {
+ val CaseDef(pattern, guard, body) = caseDef
+ translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
+ }
+
+ def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate()
+
+ // An absent guard (EmptyTree) contributes no maker; otherwise wrap it in a GuardTreeMaker.
+ def translateGuard(guard: Tree): List[TreeMaker] =
+ if (guard == EmptyTree) Nil
+ else List(GuardTreeMaker(guard))
+
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
+ // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
+ // to enable this, probably need to move away from Option to a monad specific to pattern-match,
+ // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
+ // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
+ // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
+ // Terminal maker: the case body, ascribed the overall match's expected type.
+ def translateBody(body: Tree, matchPt: Type): TreeMaker =
+ BodyTreeMaker(body, matchPt)
+
+ // Some notes from the specification
+
+ /*A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected
+ types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the
+ instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the
+ expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn)
+ where each element pattern pi matches the corresponding value vi .
+ A special case arises when c’s formal parameter types end in a repeated parameter.
+ This is further discussed in (§8.1.9).
+ **/
+
+ /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ */
+
+ /* A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ */
+
+ /* 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ */
+
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ // Factory: classifies a pattern tree as a user-defined extractor (UnApply, possibly
+ // type-ascribed) or a case-class constructor pattern (Apply), building the synthetic
+ // unapply invocation on the (cast) binder for the former.
+ object ExtractorCall {
+ // TODO: check unargs == args
+ // NOTE(review): the match has no default case — trees other than UnApply/Typed(UnApply)/Apply
+ // would raise a MatchError; presumably callers only pass extractor-shaped trees. Confirm.
+ def apply(tree: Tree, binder: Symbol): ExtractorCall = {
+ tree match {
+ case UnApply(unfun, implicits, args) =>
+ val castedBinder = ref(binder).ensureConforms(tree.tpe)
+ val synth = if (implicits.isEmpty) unfun.appliedTo(castedBinder) else unfun.appliedTo(castedBinder).appliedToArgs(implicits)
+ new ExtractorCallRegular(alignPatterns(tree, synth.tpe), synth, args, synth.tpe) // extractor
+ case Typed(unapply@ UnApply(unfun, implicits, args), tpt) =>
+ val castedBinder = ref(binder).ensureConforms(unapply.tpe)
+ val synth = /*Typed(*/ if (implicits.isEmpty) unfun.appliedTo(castedBinder) else unfun.appliedTo(castedBinder).appliedToArgs(implicits) //, tpt)
+ new ExtractorCallRegular(alignPatterns(tree, synth.tpe), synth, args, synth.tpe) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(alignPatterns(tree, tree.tpe), fun, args, fun.tpe) // case class
+ }
+ }
+ }
+
+ abstract class ExtractorCall(val aligner: PatternAligned) {
+
+ import aligner._
+
+ // subpatterns of this extractor/constructor pattern
+ def args: List[Tree]
+
+ // don't go looking for selectors if we only expect one pattern
+ def rawSubPatTypes = aligner.extractedTypes
+
+ // Single type argument of `tp` viewed as `baseClass[...]`; `or` when there isn't exactly one.
+ def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp.baseTypeWithArgs(baseClass)).argInfos match {
+ case x :: Nil => x
+ case _ => or
+ }
+
+ // The payload type inside the extractor's result: Unit for Boolean unapplys; `get`'s type
+ // when the result has the name-based isDefined/get shape; otherwise the raw result type.
+ def resultInMonad = if (aligner.isBool) defn.UnitType else {
+ val getTp = extractorMemberType(resultType, nme.get)
+ if ((extractorMemberType(resultType, nme.isDefined) isRef defn.BooleanClass) && getTp.exists)
+ getTp
+ else resultType
+ }
+ // result type of the unapply/unapplySeq call (supplied by the concrete subclass)
+ def resultType: Type
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position, binderTypeTested: Type): TreeMaker
+
+ // `subPatBinders` are the variables bound by this pattern in the following patterns
+ // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
+ // must set infos to `subPatTypes`, which are provided by extractor's result,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ def subPatBinders = subBoundTrees map (_.binder)
+ lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree
+
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
+
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ private def nonStarSubPatTypes = aligner.typedNonStarPatterns map (_.tpe)
+
+ def subPatTypes: List[Type] = typedPatterns map (_.tpe)
+
+ // there are `prodArity` non-seq elements in the tuple.
+ protected def firstIndexingBinder = prodArity
+ protected def expectedLength = elementArity
+ protected def lastIndexingBinder = totalArity - starArity - 1
+
+ private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
+ private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder))
+ private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder))(expectedLength) :: Nil
+
+ // codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = {
+ val accessors =
+ if (defn.isProductSubType(binder.info))
+ productSelectors(binder.info)
+ else binder.caseAccessors
+ val res =
+ if (accessors.isDefinedAt(i - 1)) ref(binder).select(accessors(i - 1).name)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ val rsym = res.symbol // just for debugging
+ res
+ }
+
+ // the trees that select the subpatterns on the extractor's result,
+ // referenced by `binder`
+ protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
+ def lastTrees: List[Tree] = (
+ if (!aligner.isStar) Nil
+ else if (expectedLength == 0) seqTree(binder) :: Nil
+ else genDrop(binder, expectedLength)
+ )
+ // this error-condition has already been checked by checkStarPatOK:
+ // if (isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if (lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= " +(resultInMonad, ts, subPatTypes, subPats))
+
+ // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ // [3] the last one -- if the last subpattern is a sequence wildcard:
+ // drop the prefix (indexed by the refs on the preceding line), return the remainder
+ ( productElemsToN(binder, firstIndexingBinder)
+ ++ genTake(binder, expectedLength)
+ ++ lastTrees
+ ).toList
+ }
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol): List[Tree] = {
+ val refs = if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
+ else if (binder.info.member(nme._1).exists && !isSeq) productElemsToN(binder, totalArity)
+ else ref(binder) :: Nil
+ refs
+ }
+
+ val mathSignymSymbol = defn.ScalaMathPackageVal.requiredMethod("signum".toTermName, List(defn.IntType))
+ val mathSignum = ref(defn.ScalaMathPackageVal).select(mathSignymSymbol)
+
+
+ private def compareInts(t1: Tree, t2: Tree) =
+ mathSignum.appliedTo(t1.select(defn.Int_-).appliedTo(t2))
+ //gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
+
+ protected def lengthGuard(binder: Symbol): Option[Tree] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ checkedLength map { expectedLength =>
+ // `binder.lengthCompare(expectedLength)`
+ // ...if binder has a lengthCompare method, otherwise
+ // `scala.math.signum(binder.length - expectedLength)`
+ def checkExpectedLength: Tree = sequenceType.member(nme.lengthCompare) match {
+ case NoDenotation => compareInts(Select(seqTree(binder), nme.length), Literal(Constant(expectedLength)))
+ case x:SingleDenotation => (seqTree(binder).select(x.symbol)).appliedTo(Literal(Constant(expectedLength)))
+ case _ =>
+ ctx.error("TODO: multiple lengthCompare")
+ EmptyTree
+ }
+
+ // the comparison to perform
+ // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
+ // (otherwise equality is required)
+ def compareOp: (Tree, Tree) => Tree =
+ if (aligner.isStar) _.select(defn.Int_>=).appliedTo(_)
+ else _.select(defn.Int_==).appliedTo(_)
+
+ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
+ (seqTree(binder).select(defn.Any_!=).appliedTo(Literal(Constant(null)))).select(defn.Boolean_&&).appliedTo(compareOp(checkExpectedLength, Literal(Constant(0))))
+ }
+
+ def checkedLength: Option[Int] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ if (!isSeq || expectedLength < starArity) None
+ else Some(expectedLength)
+ }
+
+ // TODO: to be called when there's a def unapplyProd(x: T): U
+ // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
+ // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
+ class ExtractorCallProd(aligner: PatternAligned, val fun: Tree, val args: List[Tree], val resultType: Type) extends ExtractorCall(aligner) {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position, binderTypeTested: Type): TreeMaker = {
+ val paramAccessors = binder.caseAccessors
+ // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
+ // NOTE(review): the scala-package exception described above is currently
+ // disabled (the hasTransOwner test is commented out), so binders are stored
+ // for every case class that has mutable accessors.
+ val mutableBinders = (
+ if (//!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) // TODO: DDD ???
+ // &&
+ (paramAccessors exists (_.hasAltWith(x => x.symbol is Flags.Mutable))))
+ subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).hasAltWith(x => x.symbol is Flags.Mutable) => binder }
+ else Nil
+ )
+
+ // checks binder ne null before chaining to the next extractor
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
+ }
+ }
+
+ // Extractor call backed by a real unapply/unapplySeq method invocation
+ // (`extractorCallIncludingDummy` is the already-synthesized application).
+ class ExtractorCallRegular(aligner: PatternAligned, extractorCallIncludingDummy: Tree, val args: List[Tree], val resultType: Type) extends ExtractorCall(aligner) {
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position, binderTypeTested: Type): TreeMaker = {
+ // the extractor call (applied to the binder bound by the flatMap corresponding
+ // to the previous (i.e., enclosing/outer) pattern)
+ val extractorApply = extractorCallIncludingDummy// spliceApply(patBinderOrCasted)
+ // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
+ // wrong when isSeq, and resultInMonad should always be correct since it comes
+ // directly from the extractor's result type
+ val binder = freshSym(pos, resultInMonad)
+ val spb = subPatBinders
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
+ spb,
+ subPatRefs(binder, spb, resultType),
+ aligner.isBool,
+ checkedLength,
+ patBinderOrCasted,
+ ignoredSubPatBinders
+ )
+ }
+
+ // When there are no fixed elements, the extractor result itself is the sequence.
+ override protected def seqTree(binder: Symbol): Tree =
+ if (firstIndexingBinder == 0) ref(binder)
+ else super.seqTree(binder)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (totalArity > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol, subpatBinders: List[Symbol], binderTypeTested: Type): List[Tree] = {
+ if (aligner.isSingle && aligner.extractor.prodArity == 1 && defn.isTupleType(binder.info)) {
+ // special case for extractor
+ // comparing with scalac additional assertions added
+ val subpw = subpatBinders.head.info.widen
+ val binderw = binder.info.widen
+ val go = subpatBinders.head.info <:< binder.info
+ val go1 = binder.info <:< subpatBinders.head.info
+ //val spr = subPatRefs(binder)
+ // sanity check: the single subpattern binder and the result binder must
+ // have mutually conforming types before the result is bound as-is
+ assert(go && go1)
+ ref(binder) :: Nil
+ } else {
+ lazy val getTp = extractorMemberType(binderTypeTested, nme.get)
+ // single-result Option-like extractor: bind the result directly;
+ // otherwise fall back to the arity-based selection logic.
+ if ((aligner.isSingle && aligner.extractor.prodArity == 1) && ((extractorMemberType(binderTypeTested, nme.isDefined) isRef defn.BooleanClass) && getTp.exists))
+ List(ref(binder))
+ else
+ subPatRefs(binder)
+ }
+ }
+
+ /*protected def spliceApply(binder: Symbol): Tree = {
+ object splice extends TreeMap {
+ def binderRef(pos: Position): Tree =
+ ref(binder) //setPos pos
+
+ override def transform(t: tpd.Tree)(implicit ctx: Context): tpd.Tree = t match {
+ // duplicated with the extractor Unapplied
+ case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
+ cpy.Apply(t, x, binderRef(i.pos) :: Nil)
+ // SI-7868 Account for numeric widening, e.g. <unapplySelector>.toInt
+ case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) =>
+ cpy.Apply(t, x, cpy.Select(sel, binderRef(i.pos), name) :: Nil)
+ case _ =>
+ super.transform(t)
+ }
+ }
+ splice transform extractorCallIncludingDummy
+ }*/
+
+ // For unapplySeq, the raw subpattern types come from the varargs expansion.
+ override def rawSubPatTypes = aligner.extractor.varargsTypes
+ }
+ }
+
+ /** An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*]
+ * A case matches: P1, P2, ..., Pj, opt[Seq[E]]
+ * Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]]
+ *
+ * Here Pm/Fi is the last pattern to match the fixed arity section.
+ *
+ * prodArity: the value of i, i.e. the number of non-sequence types in the extractor
+ * nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition
+ * elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements
+ * starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern
+ * totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition
+ *
+ * Note that prodArity is a function only of the extractor, and
+ * nonStar/star/totalArity are all functions of the patterns. The key
+ * value for aligning and typing the patterns is elementArity, as it
+ * is derived from both sets of information.
+ */
+ trait PatternExpander[Pattern, Type] {
+ /** You'll note we're not inside the cake. "Pattern" and "Type" are
+ * arbitrary types here, and NoPattern and NoType arbitrary values.
+ */
+ def NoPattern: Pattern
+ def NoType: Type
+
+ /** It's not optimal that we're carrying both sequence and repeated
+ * type here, but the implementation requires more unraveling before
+ * it can be avoided.
+ *
+ * sequenceType is Seq[T], elementType is T, repeatedType is T*.
+ */
+ sealed case class Repeated(sequenceType: Type, elementType: Type, repeatedType: Type) {
+ // A Repeated with elementType == NoType acts as "no sequence part".
+ def exists = elementType != NoType
+
+ def elementList = if (exists) elementType :: Nil else Nil
+ def sequenceList = if (exists) sequenceType :: Nil else Nil
+ def repeatedList = if (exists) repeatedType :: Nil else Nil
+
+ override def toString = s"${elementType}*"
+ }
+ // Sentinel value meaning "this extractor has no sequence component".
+ object NoRepeated extends Repeated(NoType, NoType, NoType) {
+ override def toString = "<none>"
+ }
+
+ // The patterns of one case, split into fixed patterns and an optional
+ // trailing star pattern (star == NoPattern when absent).
+ final case class Patterns(fixed: List[Pattern], star: Pattern) {
+ def hasStar = star != NoPattern
+ def starArity = if (hasStar) 1 else 0
+ def nonStarArity = fixed.length
+ def totalArity = nonStarArity + starArity
+ def starPatterns = if (hasStar) star :: Nil else Nil
+ def all = fixed ::: starPatterns
+
+ override def toString = all mkString ", "
+ }
+
+ /** An 'extractor' can be a case class or an unapply or unapplySeq method.
+ * Decoding what it is that they extract takes place before we arrive here,
+ * so that this class can concentrate only on the relationship between
+ * patterns and types.
+ *
+ * In a case class, the class is the unextracted type and the fixed and
+ * repeated types are derived from its constructor parameters.
+ *
+ * In an unapply, this is reversed: the parameter to the unapply is the
+ * unextracted type, and the other types are derived based on the return
+ * type of the unapply method.
+ *
+ * In other words, this case class and unapply are encoded the same:
+ *
+ * case class Foo(x: Int, y: Int, zs: Char*)
+ * def unapplySeq(x: Foo): Option[(Int, Int, Seq[Char])]
+ *
+ * Both are Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*))
+ *
+ * @param whole The type in its unextracted form
+ * @param fixed The non-sequence types which are extracted
+ * @param repeated The sequence type which is extracted
+ */
+ final case class Extractor(whole: Type, fixed: List[Type], repeated: Repeated) {
+ require(whole != NoType, s"expandTypes($whole, $fixed, $repeated)")
+
+ def prodArity = fixed.length
+ def hasSeq = repeated.exists
+ def elementType = repeated.elementType
+ def sequenceType = repeated.sequenceType
+ def allTypes = fixed ::: repeated.sequenceList
+ def varargsTypes = fixed ::: repeated.repeatedList
+ def isErroneous = allTypes contains NoType
+
+ private def typeStrings = fixed.map("" + _) ::: ( if (hasSeq) List("" + repeated) else Nil )
+
+ // Human-readable description of what this extractor offers (for errors).
+ def offeringString = if (isErroneous) "<error>" else typeStrings match {
+ case Nil => "Boolean"
+ case tp :: Nil => tp
+ case tps => tps.mkString("(", ", ", ")")
+ }
+ override def toString = "%s => %s".format(whole, offeringString)
+ }
+
+ // A pattern paired with the type it is expected to match.
+ final case class TypedPat(pat: Pattern, tpe: Type) {
+ override def toString = s"$pat: $tpe"
+ }
+
+ /** If elementArity is...
+ * 0: A perfect match between extractor and the fixed patterns.
+ * If there is a star pattern it will match any sequence.
+ * > 0: There are more patterns than products. There will have to be a
+ * sequence which can populate at least <elementArity> patterns.
+ * < 0: There are more products than patterns: compile time error.
+ */
+ final case class Aligned(patterns: Patterns, extractor: Extractor) {
+ def elementArity = patterns.nonStarArity - prodArity
+ def prodArity = extractor.prodArity
+ def starArity = patterns.starArity
+ def totalArity = patterns.totalArity
+
+ def wholeType = extractor.whole
+ def sequenceType = extractor.sequenceType
+ def productTypes = extractor.fixed
+ def extractedTypes = extractor.allTypes
+ def typedNonStarPatterns = products ::: elements
+ def typedPatterns = typedNonStarPatterns ::: stars
+
+ def isBool = !isSeq && prodArity == 0
+ def isSingle = !isSeq && totalArity == 1
+ def isStar = patterns.hasStar
+ def isSeq = extractor.hasSeq
+
+ private def typedAsElement(pat: Pattern) = TypedPat(pat, extractor.elementType)
+ private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType)
+ private def productPats = patterns.fixed take prodArity
+ private def elementPats = patterns.fixed drop prodArity
+ private def products = (productPats, productTypes).zipped map TypedPat
+ private def elements = elementPats map typedAsElement
+ private def stars = patterns.starPatterns map typedAsSequence
+
+ override def toString = s"""
+ |Aligned {
+ | patterns $patterns
+ | extractor $extractor
+ | arities $prodArity/$elementArity/$starArity // product/element/star
+ | typed ${typedPatterns mkString ", "}
+ |}""".stripMargin.trim
+ }
+ }
+
+ /** This is scalac-specific logic layered on top of the scalac-agnostic
+ * "matching products to patterns" logic defined in PatternExpander.
+ */
+ trait ScalacPatternExpanders {
+
+ type PatternAligned = ScalacPatternExpander#Aligned
+
+ // Convenience accessors over an Aligned, used by ExtractorCall and friends.
+ implicit class AlignedOps(val aligned: PatternAligned) {
+ import aligned._
+ def expectedTypes = typedPatterns map (_.tpe)
+ def unexpandedFormals = extractor.varargsTypes
+ }
+
+ // Instantiates PatternExpander for dotty trees and types.
+ trait ScalacPatternExpander extends PatternExpander[Tree, Type] {
+ def NoPattern = EmptyTree
+ def NoType = core.Types.NoType
+
+ // Splits patterns into fixed patterns plus an optional trailing `_*`.
+ def newPatterns(patterns: List[Tree]): Patterns = patterns match {
+ case init :+ last if tpd.isWildcardStarArg(last) => Patterns(init, last)
+ case _ => Patterns(patterns, NoPattern)
+ }
+ def typeOfMemberNamedHead(tpe: Type): Type = tpe.select(nme.head)
+ def typeOfMemberNamedApply(tpe: Type): Type = tpe.select(nme.apply)
+
+ // Element type of a sequence-like type: try `head`, then `apply`, then
+ // the declared element type.
+ def elementTypeOf(tpe: Type) = {
+ val seq = tpe //repeatedToSeq(tpe)
+
+ ( typeOfMemberNamedHead(seq)
+ orElse typeOfMemberNamedApply(seq)
+ orElse seq.elemType
+ )
+ }
+ // Logged Extractor construction (debug aid).
+ def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor = {
+ ctx.log(s"newExtractor($whole, $fixed, $repeated")
+ Extractor(whole, fixed, repeated)
+ }
+
+ // Turn Seq[A] into Repeated(Seq[A], A, A*)
+ def repeatedFromSeq(seqType: Type): Repeated = {
+ val elem = elementTypeOf(seqType)
+ val repeated = /*scalaRepeatedType(*/elem//)
+
+ Repeated(seqType, elem, repeated)
+ }
+ // Turn A* into Repeated(Seq[A], A, A*)
+ def repeatedFromVarargs(repeated: Type): Repeated =
+ //Repeated(repeatedToSeq(repeated), repeatedToSingle(repeated), repeated)
+ Repeated(repeated, repeated.elemType, repeated)
+
+ /** In this case we are basing the pattern expansion on a case class constructor.
+ * The argument is the MethodType carried by the primary constructor.
+ */
+ def applyMethodTypes(method: Type): Extractor = {
+ val whole = method.finalResultType
+
+ method.paramTypess.head match {
+ case init :+ last if last.isRepeatedParam => newExtractor(whole, init, repeatedFromVarargs(last))
+ case tps => newExtractor(whole, tps, NoRepeated)
+ }
+ }
+
+ def hasSelectors(tpe: Type) = tpe.member(nme._1).exists && tpe.member(nme._2).exists // dd todo: ???
+
+
+ /** In this case, expansion is based on an unapply or unapplySeq method.
+ * Unfortunately the MethodType does not carry the information of whether
+ * it was unapplySeq, so we have to funnel that information in separately.
+ */
+ def unapplyMethodTypes(tree: Tree, fun: Tree, args: List[Tree], resultType: Type, isSeq: Boolean): Extractor = {
+ // NOTE(review): `_id` is a counter declared outside this visible chunk;
+ // it is bumped per expansion and surfaces in arityError messages below.
+ _id = _id + 1
+
+ val whole = tree.tpe // see scaladoc for Trees.Unapply
+ // fun.tpe.widen.paramTypess.headOption.flatMap(_.headOption).getOrElse(NoType)//firstParamType(method)
+ val resultOfGet = extractorMemberType(resultType, nme.get)
+
+ // The extracted types: Option-like results expand via their `get` member,
+ // ProductN results via their selectors, Boolean extracts nothing.
+ val expanded: List[Type] = /*(
+ if (result =:= defn.BooleanType) Nil
+ else if (defn.isProductSubType(result)) productSelectorTypes(result)
+ else if (result.classSymbol is Flags.CaseClass) result.decls.filter(x => x.is(Flags.CaseAccessor) && x.is(Flags.Method)).map(_.info).toList
+ else result.select(nme.get) :: Nil
+ )*/
+ if ((extractorMemberType(resultType, nme.isDefined) isRef defn.BooleanClass) && resultOfGet.exists)
+ getUnapplySelectors(resultOfGet, args)
+ else if (defn.isProductSubType(resultType)) productSelectorTypes(resultType)
+ else if (resultType isRef defn.BooleanClass) Nil
+ else {
+ ctx.error(i"invalid return type in Unapply node: $resultType")
+ Nil
+ }
+
+ // For unapplySeq the last expanded type is the sequence carrying the rest.
+ expanded match {
+ case init :+ last if isSeq => newExtractor(whole, init, repeatedFromSeq(last))
+ case tps => newExtractor(whole, tps, NoRepeated)
+ }
+ }
+ }
+
+ object alignPatterns extends ScalacPatternExpander {
+ /** Converts a T => (A, B, C) extractor to a T => ((A, B, C)) extractor.
+ */
+ def tupleExtractor(extractor: Extractor): Extractor =
+ extractor.copy(fixed = defn.tupleType(extractor.fixed) :: Nil)
+
+ // Reports arity mismatches / illegal star patterns, then returns `aligned`.
+ private def validateAligned(tree: Tree, aligned: Aligned): Aligned = {
+ import aligned._
+
+ def owner = tree.symbol.owner
+ def offering = extractor.offeringString
+ def symString = tree.symbol.showLocated
+ def offerString = if (extractor.isErroneous) "" else s" offering $offering"
+ def arityExpected = (if (extractor.hasSeq) "at least " else "") + prodArity
+
+ def err(msg: String) = ctx.error(msg, tree.pos)
+ def warn(msg: String) = ctx.warning(msg, tree.pos)
+ def arityError(what: String) = err(s"${_id} $what patterns for $owner$offerString: expected $arityExpected, found $totalArity")
+
+ if (isStar && !isSeq)
+ err("Star pattern must correspond with varargs or unapplySeq")
+ else if (elementArity < 0)
+ arityError("not enough")
+ else if (elementArity > 0 && !extractor.hasSeq)
+ arityError("too many")
+
+ aligned
+ }
+
+ object Applied {
+ // Duplicated with `spliceApply`
+ // Strips nested Apply nodes down to the underlying function tree.
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ // SI-7868 Admit Select() to account for numeric widening, e.g. <unapplySelector>.toInt
+ /*case Apply(fun, (Ident(nme.SELECTOR_DUMMY)| Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil)
+ => Some(fun)*/
+ case Apply(fun, _) => unapply(fun)
+ case _ => None
+ }
+ }
+
+ // Builds and validates the Aligned pairing of case patterns with the
+ // types offered by the extractor `sel` (unapply, unapplySeq, or apply).
+ def apply(tree: Tree, sel: Tree, args: List[Tree], resultType: Type): Aligned = {
+ val fn = sel match {
+ case Applied(fn) => fn
+ case _ => sel
+ }
+ val patterns = newPatterns(args)
+ val isSeq = sel.symbol.name == nme.unapplySeq
+ val isUnapply = sel.symbol.name == nme.unapply
+ val extractor = sel.symbol.name match {
+ case nme.unapply => unapplyMethodTypes(tree, /*fn*/sel, args, resultType, isSeq = false)
+ case nme.unapplySeq => unapplyMethodTypes(tree, /*fn*/sel, args, resultType, isSeq = true)
+ case _ => applyMethodTypes(/*fn*/sel.tpe)
+ }
+
+ /** Rather than let the error that is SI-6675 pollute the entire matching
+ * process, we will tuple the extractor before creating the Aligned so that
+ * it contains known good values.
+ */
+ def prodArity = extractor.prodArity
+ def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}"
+ val requiresTupling = isUnapply && patterns.totalArity == 1 && prodArity > 1
+
+ //if (requiresTupling && effectivePatternArity(args) == 1)
+ // currentUnit.deprecationWarning(sel.pos, s"${sel.symbol.owner} expects $prodArity patterns$acceptMessage but crushing into $prodArity-tuple to fit single pattern (SI-6675)")
+
+ val normalizedExtractor =
+ if (requiresTupling)
+ tupleExtractor(extractor)
+ else extractor
+ validateAligned(fn, Aligned(patterns, normalizedExtractor))
+ }
+
+ // Entry point: dispatch on the pattern tree's shape.
+ // NOTE(review): non-exhaustive — only Typed/Apply/UnApply trees are expected.
+ def apply(tree: Tree, resultType: Type): Aligned = tree match {
+ case Typed(tree, _) => apply(tree, resultType)
+ case Apply(fn, args) => apply(tree, fn, args, resultType)
+ case UnApply(fn, implicits, args) => apply(tree, fn, args, resultType)
+ }
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
new file mode 100644
index 000000000..61c3ca5de
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -0,0 +1,108 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Contexts.Context
+import Decorators._
+import tasty._
+import config.Printers.{noPrinter, pickling}
+import java.io.PrintStream
+import Periods._
+import Phases._
+import Symbols._
+import Flags.Module
+import collection.mutable
+
+ /** This phase pickles trees */
+ class Pickler extends Phase {
+ import ast.tpd._
+
+ override def phaseName: String = "pickler"
+
+ // Writes `msg` to a file called `name`; used to dump before/after trees
+ // when -Ytest-pickler detects a difference.
+ private def output(name: String, msg: String) = {
+ val s = new PrintStream(name)
+ s.print(msg)
+ s.close
+ }
+
+ // Maps that keep a record if -Ytest-pickler is set.
+ private val beforePickling = new mutable.HashMap[ClassSymbol, String]
+ private val picklers = new mutable.HashMap[ClassSymbol, TastyPickler]
+
+ /** Drop any elements of this list that are linked module classes of other elements in the list */
+ private def dropCompanionModuleClasses(clss: List[ClassSymbol])(implicit ctx: Context): List[ClassSymbol] = {
+ val companionModuleClasses =
+ clss.filterNot(_ is Module).map(_.linkedClass).filterNot(_.isAbsent)
+ clss.filterNot(companionModuleClasses.contains)
+ }
+
+ // Pickles every top-level class of the unit into TASTY bytes, recording
+ // them in `unit.pickled`; optionally prints/records for -Ytest-pickler.
+ override def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ // NOTE(review): message says "unpickling" although this phase pickles —
+ // likely a copy/paste slip in the log text.
+ pickling.println(i"unpickling in run ${ctx.runId}")
+
+ for { cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree))
+ tree <- sliceTopLevel(unit.tpdTree, cls) } {
+ val pickler = new TastyPickler()
+ if (ctx.settings.YtestPickler.value) {
+ beforePickling(cls) = tree.show
+ picklers(cls) = pickler
+ }
+ val treePkl = pickler.treePkl
+ treePkl.pickle(tree :: Nil)
+ treePkl.compactify()
+ // expose address maps so later sections (e.g. positions) can refer to trees
+ pickler.addrOfTree = treePkl.buf.addrOfTree
+ pickler.addrOfSym = treePkl.addrOfSym
+ if (tree.pos.exists)
+ new PositionPickler(pickler, treePkl.buf.addrOfTree).picklePositions(tree :: Nil)
+
+ // other pickle sections go here.
+ val pickled = pickler.assembleParts()
+ unit.pickled += (cls -> pickled)
+
+ def rawBytes = // not needed right now, but useful to print raw format.
+ pickled.iterator.grouped(10).toList.zipWithIndex.map {
+ case (row, i) => s"${i}0: ${row.mkString(" ")}"
+ }
+ // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG
+ if (pickling ne noPrinter) {
+ println(i"**** pickled info of $cls")
+ new TastyPrinter(pickler.assembleParts()).printContents()
+ }
+ }
+ }
+
+ // After the normal run, optionally round-trips the pickled trees in a
+ // fresh context (next run id, positions read back) to verify fidelity.
+ override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
+ val result = super.runOn(units)
+ if (ctx.settings.YtestPickler.value)
+ testUnpickler(
+ ctx.fresh
+ .setPeriod(Period(ctx.runId + 1, FirstPhaseId))
+ .addMode(Mode.ReadPositions))
+ result
+ }
+
+ // Unpickles everything recorded in `picklers` and compares the shown trees
+ // against the `beforePickling` snapshots.
+ private def testUnpickler(implicit ctx: Context): Unit = {
+ pickling.println(i"testing unpickler at run ${ctx.runId}")
+ ctx.initialize()
+ val unpicklers =
+ for ((cls, pickler) <- picklers) yield {
+ val unpickler = new DottyUnpickler(pickler.assembleParts())
+ unpickler.enter(roots = Set())
+ cls -> unpickler
+ }
+ pickling.println("************* entered toplevel ***********")
+ for ((cls, unpickler) <- unpicklers) {
+ val unpickled = unpickler.body
+ testSame(i"$unpickled%\n%", beforePickling(cls), cls)
+ }
+ }
+
+ // Reports an error (and dumps both versions to files) when the unpickled
+ // tree text differs from the pre-pickling snapshot.
+ private def testSame(unpickled: String, previous: String, cls: ClassSymbol)(implicit ctx: Context) =
+ if (previous != unpickled) {
+ output("before-pickling.txt", previous)
+ output("after-pickling.txt", unpickled)
+ ctx.error(i"""pickling difference for ${cls.fullName} in ${cls.sourceFile}, for details:
+ |
+ | diff before-pickling.txt after-pickling.txt""")
+ }
+ }
diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
new file mode 100644
index 000000000..1ed47d92e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -0,0 +1,286 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransform, TreeTransformer}
+import dotty.tools.dotc.ast.{Trees, tpd}
+import scala.collection.{ mutable, immutable }
+import ValueClasses._
+import scala.annotation.tailrec
+import core._
+import typer.ErrorReporting._
+import typer.Checking
+import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
+import util.Positions._
+import Decorators._
+import config.Printers.typr
+import Symbols._, TypeUtils._
+
+/** A macro transform that runs immediately after typer and that performs the following functions:
+ *
+ * (1) Add super accessors and protected accessors (@see SuperAccessors)
+ *
+ * (2) Convert parameter fields that have the same name as a corresponding
+ * public parameter field in a superclass to a forwarder to the superclass
+ * field (corresponding = super class field is initialized with subclass field)
+ * (@see ForwardParamAccessors)
+ *
+ * (3) Add synthetic methods (@see SyntheticMethods)
+ *
+ * (4) Check that `New` nodes can be instantiated, and that annotations are valid
+ *
+ * (5) Convert all trees representing types to TypeTrees.
+ *
+ * (6) Check the bounds of AppliedTypeTrees
+ *
+ * (7) Insert `.package` for selections of package object members
+ *
+ * (8) Replaces self references by name with `this`
+ *
+ * (9) Adds SourceFile annotations to all top-level classes and objects
+ *
+ * (10) Adds Child annotations to all sealed classes
+ *
+ * (11) Minimizes `call` fields of `Inline` nodes to just point to the toplevel
+ * class from which code was inlined.
+ *
+ * The reason for making this a macro transform is that some functions (in particular
+ * super and protected accessors and instantiation checks) are naturally top-down and
+ * don't lend themselves to the bottom-up approach of a mini phase. The other two functions
+ * (forwarding param accessors and synthetic methods) only apply to templates and fit
+ * mini-phase or subfunction of a macro phase equally well. But taken by themselves
+ * they do not warrant their own group of miniphases before pickling.
+ */
+class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTransformer =>
+
+ import tpd._
+
+ /** the following two members override abstract members in Transform */
+ override def phaseName: String = "posttyper"
+
+ override def transformPhase(implicit ctx: Context) = thisTransformer.next
+
+ protected def newTransformer(implicit ctx: Context): Transformer =
+ new PostTyperTransformer
+
+ val superAcc = new SuperAccessors(thisTransformer)
+ val paramFwd = new ParamForwarding(thisTransformer)
+ val synthMth = new SyntheticMethods(thisTransformer)
+
+  /** The `New` node underlying an instantiation call `new C(...)` in `tree`,
+   *  if its method part is a selection from a `New`; `None` otherwise.
+   */
+  private def newPart(tree: Tree): Option[New] = methPart(tree) match {
+    case Select(nu: New, _) => Some(nu)
+    case _ => None
+  }
+
+  /** Check well-formedness of a Java annotation tree. Currently a no-op. */
+  private def checkValidJavaAnnotation(annot: Tree)(implicit ctx: Context): Unit = {
+    // TODO fill in
+  }
+
+  /** If the type of `tree` is a TermRefWithSignature with an underdefined
+   *  signature, narrow the type by re-computing the signature (which should
+   *  be fully-defined by now).
+   */
+  private def fixSignature[T <: Tree](tree: T)(implicit ctx: Context): T = tree.tpe match {
+    case tpe: TermRefWithSignature if tpe.signature.isUnderDefined =>
+      typr.println(i"fixing $tree with type ${tree.tpe.widen.toString} with sig ${tpe.signature} to ${tpe.widen.signature}")
+      // Rebuild the TermRef with the signature recomputed from the widened type.
+      tree.withType(TermRef.withSig(tpe.prefix, tpe.name, tpe.widen.signature)).asInstanceOf[T]
+    case _ => tree
+  }
+
+ class PostTyperTransformer extends Transformer {
+
+ private var inJavaAnnot: Boolean = false
+
+ private var parentNews: Set[New] = Set()
+
+    /** Transform an annotation tree, tracking in `inJavaAnnot` whether we are
+     *  inside a Java annotation (restored afterwards); Java annotations are
+     *  additionally validated via `checkValidJavaAnnotation`.
+     */
+    private def transformAnnot(annot: Tree)(implicit ctx: Context): Tree = {
+      val saved = inJavaAnnot
+      inJavaAnnot = annot.symbol is JavaDefined
+      if (inJavaAnnot) checkValidJavaAnnotation(annot)
+      try transform(annot)
+      finally inJavaAnnot = saved
+    }
+
+    /** Like `transformAnnot` on trees, but for an `Annotation` wrapper. */
+    private def transformAnnot(annot: Annotation)(implicit ctx: Context): Annotation =
+      annot.derivedAnnotation(transformAnnot(annot.tree))
+
+    /** Transform the annotations of a member definition and, for non-synthetic,
+     *  non-private class members, check for private type leaks; if the check
+     *  returns a narrowed info, install it after this phase.
+     */
+    private def transformMemberDef(tree: MemberDef)(implicit ctx: Context): Unit = {
+      val sym = tree.symbol
+      sym.transformAnnotations(transformAnnot)
+      if (!sym.is(SyntheticOrPrivate) && sym.owner.isClass) {
+        val info1 = Checking.checkNoPrivateLeaks(sym, tree.pos)
+        if (info1 ne sym.info)
+          sym.copySymDenotation(info = info1).installAfter(thisTransformer)
+      }
+    }
+
+    /** Transform a selection: insert a `.package` hop when selecting a package
+     *  object member through its package (function (7) of this phase), then
+     *  constant-fold or route through the super-/protected-accessor machinery.
+     */
+    private def transformSelect(tree: Select, targs: List[Tree])(implicit ctx: Context): Tree = {
+      val qual = tree.qualifier
+      qual.symbol.moduleClass.denot match {
+        case pkg: PackageClassDenotation if !tree.symbol.maybeOwner.is(Package) =>
+          // Member lives in the package object, not the package itself: retry via `pkg.packageObj`.
+          transformSelect(cpy.Select(tree)(qual select pkg.packageObj.symbol, tree.name), targs)
+        case _ =>
+          val tree1 = super.transform(tree)
+          constToLiteral(tree1) match {
+            case _: Literal => tree1
+            case _ => superAcc.transformSelect(tree1, targs)
+          }
+      }
+    }
+
+    /** Flatten nested TypeApply nodes and put named type arguments back into
+     *  parameter-declaration order. A result that is still a PolyType means
+     *  more arguments are expected, so the tree is returned unchanged.
+     */
+    private def normalizeTypeArgs(tree: TypeApply)(implicit ctx: Context): TypeApply = tree.tpe match {
+      case pt: PolyType => // wait for more arguments coming
+        tree
+      case _ =>
+        // Collect the innermost type constructor and all arguments of nested TypeApplys.
+        def decompose(tree: TypeApply): (Tree, List[Tree]) = tree.fun match {
+          case fun: TypeApply =>
+            val (tycon, args) = decompose(fun)
+            (tycon, args ++ tree.args)
+          case _ =>
+            (tree.fun, tree.args)
+        }
+        // Interleave named and positional arguments following the parameter name list.
+        def reorderArgs(pnames: List[Name], namedArgs: List[NamedArg], otherArgs: List[Tree]): List[Tree] = pnames match {
+          case pname :: pnames1 =>
+            namedArgs.partition(_.name == pname) match {
+              case (NamedArg(_, arg) :: _, namedArgs1) =>
+                arg :: reorderArgs(pnames1, namedArgs1, otherArgs)
+              case _ =>
+                val otherArg :: otherArgs1 = otherArgs
+                otherArg :: reorderArgs(pnames1, namedArgs, otherArgs1)
+            }
+          case nil => // NB: lowercase `nil` is a variable pattern; reached exactly when pnames is empty
+            assert(namedArgs.isEmpty && otherArgs.isEmpty)
+            Nil
+        }
+        val (tycon, args) = decompose(tree)
+        tycon.tpe.widen match {
+          case tp: PolyType =>
+            val (namedArgs, otherArgs) = args.partition(isNamedArg)
+            val args1 = reorderArgs(tp.paramNames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs)
+            TypeApply(tycon, args1).withPos(tree.pos).withType(tree.tpe)
+          case _ =>
+            tree
+        }
+    }
+
+    /** Main traversal implementing the functions listed in the class comment:
+     *  self-reference rewriting, accessor insertion, bounds/realizability/
+     *  instantiability checks, import-selector checks, annotation transforms,
+     *  and Inlined-call minimization.
+     */
+    override def transform(tree: Tree)(implicit ctx: Context): Tree =
+      try tree match {
+        case tree: Ident if !tree.isType =>
+          tree.tpe match {
+            case tpe: ThisType => This(tpe.cls).withPos(tree.pos) // function (8): name-based self reference -> `this`
+            case _ => paramFwd.adaptRef(fixSignature(tree))
+          }
+        case tree @ Select(qual, name) =>
+          if (name.isTypeName) {
+            Checking.checkRealizable(qual.tpe, qual.pos.focus)
+            super.transform(tree)
+          }
+          else
+            transformSelect(paramFwd.adaptRef(fixSignature(tree)), Nil)
+        case tree: Super =>
+          if (ctx.owner.enclosingMethod.isInlineMethod)
+            ctx.error(em"super not allowed in inline ${ctx.owner}", tree.pos)
+          super.transform(tree)
+        case tree: TypeApply =>
+          val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree)
+          Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType]) // function (6)
+          fn match {
+            case sel: Select =>
+              val args1 = transform(args)
+              val sel1 = transformSelect(sel, args1)
+              // A protected accessor already carries its type args; don't reapply them.
+              if (superAcc.isProtectedAccessor(sel1)) sel1 else cpy.TypeApply(tree1)(sel1, args1)
+            case _ =>
+              super.transform(tree1)
+          }
+        case tree @ Assign(sel: Select, _) =>
+          superAcc.transformAssign(super.transform(tree))
+        case Inlined(call, bindings, expansion) =>
+          // Leave only a call trace consisting of
+          // - a reference to the top-level class from which the call was inlined,
+          // - the call's position
+          // in the call field of an Inlined node.
+          // The trace has enough info to completely reconstruct positions.
+          // The minimization is done for two reasons:
+          // 1. To save space (calls might contain large inline arguments, which would otherwise
+          //    be duplicated)
+          // 2. To enable correct pickling (calls can share symbols with the inlined code, which
+          //    would trigger an assertion when pickling).
+          val callTrace = Ident(call.symbol.topLevelClass.typeRef).withPos(call.pos)
+          cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion))
+        case tree: Template =>
+          // Remember the `New` nodes appearing as parents; they are exempt from
+          // the instantiability check in the `New` case below.
+          val saved = parentNews
+          parentNews ++= tree.parents.flatMap(newPart)
+          try {
+            val templ1 = paramFwd.forwardParamAccessors(tree)
+            synthMth.addSyntheticMethods(
+              superAcc.wrapTemplate(templ1)(
+                super.transform(_).asInstanceOf[Template]))
+          }
+          finally parentNews = saved
+        case tree: DefDef =>
+          transformMemberDef(tree)
+          superAcc.wrapDefDef(tree)(super.transform(tree).asInstanceOf[DefDef])
+        case tree: TypeDef =>
+          transformMemberDef(tree)
+          val sym = tree.symbol
+          if (sym.isClass) {
+            // Add SourceFile annotation to top-level classes
+            if (sym.owner.is(Package) &&
+                ctx.compilationUnit.source.exists &&
+                sym != defn.SourceFileAnnot)
+              sym.addAnnotation(Annotation.makeSourceFile(ctx.compilationUnit.source.file.path))
+
+            // Add Child annotation to sealed parents unless current class is anonymous
+            if (!sym.isAnonymousClass) // ignore anonymous class
+              for (parent <- sym.asClass.classInfo.classParents) {
+                val pclazz = parent.classSymbol
+                if (pclazz.is(Sealed)) pclazz.addAnnotation(Annotation.makeChild(sym))
+              }
+
+            tree // NOTE(review): this value is discarded — the result of the case is super.transform(tree) below; confirm the `tree` expression is intentional
+          }
+          super.transform(tree)
+        case tree: MemberDef =>
+          transformMemberDef(tree)
+          super.transform(tree)
+        case tree: New if !inJavaAnnot && !parentNews.contains(tree) =>
+          Checking.checkInstantiable(tree.tpe, tree.pos) // function (4)
+          super.transform(tree)
+        case tree @ Annotated(annotated, annot) =>
+          cpy.Annotated(tree)(transform(annotated), transformAnnot(annot))
+        case tree: AppliedTypeTree =>
+          Checking.checkAppliedType(tree)
+          super.transform(tree)
+        case SingletonTypeTree(ref) =>
+          Checking.checkRealizable(ref.tpe, ref.pos.focus)
+          super.transform(tree)
+        case tree: TypeTree =>
+          tree.withType(
+            tree.tpe match {
+              case AnnotatedType(tpe, annot) => AnnotatedType(tpe, transformAnnot(annot))
+              case tpe => tpe
+            }
+          )
+        case Import(expr, selectors) =>
+          // Check that every named import selector refers to an existing term or type member.
+          val exprTpe = expr.tpe
+          def checkIdent(ident: Ident): Unit = {
+            val name = ident.name.asTermName.encode
+            if (name != nme.WILDCARD && !exprTpe.member(name).exists && !exprTpe.member(name.toTypeName).exists)
+              ctx.error(s"${ident.name} is not a member of ${expr.show}", ident.pos)
+          }
+          selectors.foreach {
+            case ident: Ident => checkIdent(ident)
+            case Thicket((ident: Ident) :: _) => checkIdent(ident) // rename selector `a => b`: check `a`
+            case _ =>
+          }
+          super.transform(tree)
+        case tree =>
+          super.transform(tree)
+      }
+      catch {
+        case ex : AssertionError =>
+          println(i"error while transforming $tree")
+          throw ex
+      }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/PrivateToStatic.scala.disabled b/compiler/src/dotty/tools/dotc/transform/PrivateToStatic.scala.disabled
new file mode 100644
index 000000000..218839d01
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/PrivateToStatic.scala.disabled
@@ -0,0 +1,94 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers.SymTransformer
+import Contexts.Context
+import Symbols._
+import Scopes._
+import Flags._
+import StdNames._
+import SymDenotations._
+import Types._
+import collection.mutable
+import TreeTransforms._
+import Decorators._
+import ast.Trees._
+import TreeTransforms.TransformerInfo
+
+/** Makes private methods static, provided they are not deferred, accessors, or static,
+ * by rewriting a method `m` in class `C` as follows:
+ *
+ * private def m(ps) = e
+ *
+ * --> private static def($this: C, ps) = [this -> $this] e
+ */
+class PrivateToStatic extends MiniPhase with SymTransformer { thisTransform =>
+  import ast.tpd._
+  override def phaseName = "privateToStatic"
+  override def relaxedTyping = true
+
+  // Flags that disqualify a method from being made static.
+  private val Immovable = Deferred | Accessor | JavaStatic
+
+  // NOTE(review): the class comment says "class C" but this requires a Trait owner — confirm which is intended.
+  def shouldBeStatic(sd: SymDenotation)(implicit ctx: Context) =
+    sd.current(ctx.withPhase(thisTransform)).asSymDenotation
+      .is(PrivateMethod, butNot = Immovable) &&
+    sd.owner.is(Trait)
+
+  /** Prepend a `$this` self parameter of the owner's this-type to the method's info. */
+  override def transformSym(sd: SymDenotation)(implicit ctx: Context): SymDenotation =
+    if (shouldBeStatic(sd)) {
+      val mt @ MethodType(pnames, ptypes) = sd.info
+      sd.copySymDenotation(
+        initFlags = sd.flags | JavaStatic,
+        info = MethodType(nme.SELF :: pnames, sd.owner.thisType :: ptypes, mt.resultType))
+    }
+    else sd
+
+  val treeTransform = new Transform(NoSymbol)
+
+  /** Tree transform; `thisParam` is the `$this` symbol of the enclosing static-ified method. */
+  class Transform(thisParam: Symbol) extends TreeTransform {
+    def phase = thisTransform
+
+    override def prepareForDefDef(tree: DefDef)(implicit ctx: Context) =
+      if (shouldBeStatic(tree.symbol)) {
+        val selfParam = ctx.newSymbol(tree.symbol, nme.SELF, Param, tree.symbol.owner.thisType, coord = tree.pos)
+        new Transform(selfParam)
+      }
+      else this
+
+    override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo) =
+      if (shouldBeStatic(tree.symbol)) {
+        val thisParamDef = ValDef(thisParam.asTerm)
+        val vparams :: Nil = tree.vparamss
+        cpy.DefDef(tree)(vparamss = (thisParamDef :: vparams) :: Nil)
+      }
+      else tree
+
+    override def transformThis(tree: This)(implicit ctx: Context, info: TransformerInfo) =
+      if (shouldBeStatic(ctx.owner.enclosingMethod)) ref(thisParam).withPos(tree.pos)
+      else tree
+
+    /** Rewrites a call to a method `m` which is made static as follows:
+     *
+     *    qual.m(args) --> m(qual, args)
+     */
+    override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) =
+      tree.fun match {
+        case fun @ Select(qual, name) if shouldBeStatic(fun.symbol) =>
+          ctx.debuglog(i"mapping $tree to ${cpy.Ident(fun)(name)} (${qual :: tree.args}%, %)")
+          cpy.Apply(tree)(ref(fun.symbol).withPos(fun.pos), qual :: tree.args)
+        case _ =>
+          tree
+      }
+
+    /** Same rewriting for closures: the receiver becomes the first captured env value. */
+    override def transformClosure(tree: Closure)(implicit ctx: Context, info: TransformerInfo) =
+      tree.meth match {
+        case meth @ Select(qual, name) if shouldBeStatic(meth.symbol) =>
+          cpy.Closure(tree)(
+            env = qual :: tree.env,
+            meth = ref(meth.symbol).withPos(meth.pos))
+        case _ =>
+          tree
+      }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
new file mode 100644
index 000000000..e718a7e60
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
@@ -0,0 +1,115 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import Contexts.Context
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import ast.Trees._
+import util.Positions._
+import Names._
+import collection.mutable
+import ResolveSuper._
+
+/** This phase adds super accessors and method overrides where
+ * linearization differs from Java's rule for default methods in interfaces.
+ * In particular:
+ *
+ * For every trait M directly implemented by the class (see SymUtils.mixin), in
+ * reverse linearization order, add the following definitions to C:
+ *
+ * 3.1 (done in `superAccessors`) For every superAccessor
+ * `<mods> def super$f[Ts](ps1)...(psN): U` in M:
+ *
+ * <mods> def super$f[Ts](ps1)...(psN): U = super[S].f[Ts](ps1)...(psN)
+ *
+ * where `S` is the superclass of `M` in the linearization of `C`.
+ *
+ * 3.2 (done in `methodOverrides`) For every method
+ * `<mods> def f[Ts](ps1)...(psN): U` in M` that needs to be disambiguated:
+ *
+ * <mods> def f[Ts](ps1)...(psN): U = super[M].f[Ts](ps1)...(psN)
+ *
+ * A method in M needs to be disambiguated if it is concrete, not overridden in C,
+ * and if it overrides another concrete method.
+ *
+ * This is the first part of what was the mixin phase. It is complemented by
+ * Mixin, which runs after erasure.
+ */
+class ResolveSuper extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+  import ast.tpd._
+
+  override def phaseName: String = "resolveSuper"
+
+  override def runsAfter = Set(classOf[ElimByName], // verified empirically, need to figure out what the reason is.
+    classOf[AugmentScala2Traits])
+
+  /** Add super-accessor implementations (3.1) and disambiguating method
+   *  overrides (3.2) for each directly mixed-in trait, in front of the body.
+   */
+  override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo) = {
+    val cls = impl.symbol.owner.asClass
+    val ops = new MixinOps(cls, thisTransform)
+    import ops._
+
+    def superAccessors(mixin: ClassSymbol): List[Tree] =
+      for (superAcc <- mixin.info.decls.filter(_ is SuperAccessor).toList)
+      yield polyDefDef(implementation(superAcc.asTerm), forwarder(rebindSuper(cls, superAcc)))
+
+    def methodOverrides(mixin: ClassSymbol): List[Tree] =
+      for (meth <- mixin.info.decls.toList if needsForwarder(meth))
+      yield polyDefDef(implementation(meth.asTerm), forwarder(meth))
+
+    val overrides = mixins.flatMap(mixin => superAccessors(mixin) ::: methodOverrides(mixin))
+
+    cpy.Template(impl)(body = overrides ::: impl.body)
+  }
+
+  /** Give a concrete (non-deferred) super accessor its forwarding body,
+   *  targeting the symbol found by `rebindSuper`.
+   */
+  override def transformDefDef(ddef: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
+    val meth = ddef.symbol.asTerm
+    if (meth.is(SuperAccessor, butNot = Deferred)) {
+      assert(ddef.rhs.isEmpty)
+      val cls = meth.owner.asClass
+      val ops = new MixinOps(cls, thisTransform)
+      import ops._
+      polyDefDef(meth, forwarder(rebindSuper(cls, meth)))
+    }
+    else ddef
+  }
+
+  // NOTE(review): appears unused within this class — confirm before removing.
+  private val PrivateOrAccessorOrDeferred = Private | Accessor | Deferred
+}
+
+object ResolveSuper{
+  /** Returns the symbol that is accessed by a super-accessor in a mixin composition.
+   *
+   *  @param base   The class in which everything is mixed together
+   *  @param acc    The symbol statically referred to by the superaccessor in the trait
+   */
+  def rebindSuper(base: Symbol, acc: Symbol)(implicit ctx: Context): Symbol = {
+    // Search the linearization strictly after the accessor's owner.
+    var bcs = base.info.baseClasses.dropWhile(acc.owner != _).tail
+    var sym: Symbol = NoSymbol
+    val unexpandedAccName =
+      if (acc.is(ExpandedName))  // Cannot use unexpandedName because of #765. t2183.scala would fail if we did.
+        acc.name
+          .drop(acc.name.indexOfSlice(nme.EXPAND_SEPARATOR ++ nme.SUPER_PREFIX))
+          .drop(nme.EXPAND_SEPARATOR.length)
+      else acc.name
+    val SuperAccessorName(memberName) = unexpandedAccName: Name // dotty deviation: ": Name" needed otherwise pattern type is neither a subtype nor a supertype of selector type
+    ctx.debuglog(i"starting rebindsuper from $base of ${acc.showLocated}: ${acc.info} in $bcs, name = $memberName")
+    while (bcs.nonEmpty && sym == NoSymbol) {
+      val other = bcs.head.info.nonPrivateDecl(memberName)
+      if (ctx.settings.debug.value)
+        ctx.log(i"rebindsuper ${bcs.head} $other deferred = ${other.symbol.is(Deferred)}")
+      sym = other.matchingDenotation(base.thisType, base.thisType.memberInfo(acc)).symbol
+      bcs = bcs.tail
+    }
+    assert(sym.exists)
+    sym
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
new file mode 100644
index 000000000..8b9d2be0d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
@@ -0,0 +1,67 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import DenotTransformers.IdentityDenotTransformer
+import Contexts.Context
+import Symbols._
+import Scopes._
+import collection.mutable
+import TreeTransforms.MiniPhaseTransform
+import SymDenotations._
+import ast.Trees._
+import NameOps._
+import TreeTransforms.TransformerInfo
+import StdNames._
+
+/** The preceding lambda lift and flatten phases move symbols to different scopes
+ * and rename them. This miniphase cleans up afterwards and makes sure that all
+ * class scopes contain the symbols defined in them.
+ */
+class RestoreScopes extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+  import ast.tpd._
+  override def phaseName = "restoreScopes"
+
+  /* Note: We need to wait until we see a package definition because
+   * DropEmptyConstructors changes template members when analyzing the
+   * enclosing package definitions. So by the time RestoreScopes gets to
+   * see a typedef or template, it still might be changed by DropEmptyConstructors.
+   */
+  override def transformPackageDef(pdef: PackageDef)(implicit ctx: Context, info: TransformerInfo) = {
+    pdef.stats.foreach(restoreScope)
+    pdef
+  }
+
+  /** Rebuild a class's declaration scope from its actual members and install
+   *  the resulting class info after this phase; non-TypeDef stats pass through.
+   */
+  private def restoreScope(tree: Tree)(implicit ctx: Context, info: TransformerInfo) = tree match {
+    case TypeDef(_, impl: Template) =>
+      val restoredDecls = newScope
+      for (stat <- impl.constr :: impl.body)
+        if (stat.isInstanceOf[MemberDef] && stat.symbol.exists)
+          restoredDecls.enter(stat.symbol)
+      // Enter class in enclosing package scope, in case it was an inner class before flatten.
+      // For top-level classes this does nothing. (Done by `pkg.enter(cls)` below.)
+      val cls = tree.symbol.asClass
+      val pkg = cls.owner.asClass
+
+      // Bring back companion links
+      val companionClass = cls.info.decls.lookup(nme.COMPANION_CLASS_METHOD)
+      val companionModule = cls.info.decls.lookup(nme.COMPANION_MODULE_METHOD)
+
+      if (companionClass.exists) {
+        restoredDecls.enter(companionClass)
+      }
+
+      if (companionModule.exists) {
+        restoredDecls.enter(companionModule)
+      }
+
+      pkg.enter(cls)
+      val cinfo = cls.classInfo
+      tree.symbol.copySymDenotation(
+        info = cinfo.derivedClassInfo( // Dotty deviation: Cannot expand cinfo inline without a type error
+          decls = restoredDecls: Scope)).installAfter(thisTransform)
+      tree
+    case tree => tree
+  }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
new file mode 100644
index 000000000..5d60bb984
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
@@ -0,0 +1,56 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
+import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core._
+import dotty.tools.dotc.transform.TreeTransforms._
+
+/** Removes selects that would be compiled into GetStatic
+ * otherwise backend needs to be aware that some qualifiers need to be dropped.
+ * Similar transformation seems to be performed by flatten in nsc
+ * @author Dmytro Petrashko
+ */
+class SelectStatic extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+  import ast.tpd._
+
+  override def phaseName: String = "selectStatic"
+
+  /** If the selection resolves to a static member, drop the qualifier: keep it
+   *  as a statement (for its side effects) and replace the select by a direct
+   *  reference to the symbol.
+   */
+  override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    val sym = tree.symbol
+    def isStaticMember =
+      (sym is Flags.Module) && sym.initial.maybeOwner.initial.isStaticOwner ||
+      (sym is Flags.JavaStatic) ||
+      (sym.maybeOwner is Flags.ImplClass) ||
+      sym.hasAnnotation(ctx.definitions.ScalaStaticAnnot)
+    val isStaticRef = !sym.is(Package) && !sym.maybeOwner.is(Package) && isStaticMember
+    val tree1 =
+      if (isStaticRef && !tree.qualifier.symbol.is(JavaModule) && !tree.qualifier.isType)
+        Block(List(tree.qualifier), ref(sym))
+      else tree
+
+    normalize(tree1)
+  }
+
+  /** Hoist a Block out of the function/qualifier position of a Select, Apply
+   *  or TypeApply, so the Block introduced above does not end up in call position.
+   *  NOTE(review): in the Apply/TypeApply cases `nm` binds the argument list, not a name.
+   */
+  private def normalize(t: Tree)(implicit ctx: Context) = t match {
+    case Select(Block(stats, qual), nm) =>
+      Block(stats, cpy.Select(t)(qual, nm))
+    case Apply(Block(stats, qual), nm) =>
+      Block(stats, Apply(qual, nm))
+    case TypeApply(Block(stats, qual), nm) =>
+      Block(stats, TypeApply(qual, nm))
+    case _ => t
+  }
+
+  override def transformApply(tree: tpd.Apply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    normalize(tree)
+  }
+
+  override def transformTypeApply(tree: tpd.TypeApply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+    normalize(tree)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
new file mode 100644
index 000000000..49ea69530
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
@@ -0,0 +1,48 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Types._
+import dotty.tools.dotc.transform.TreeTransforms._
+import Contexts.Context
+import Symbols._
+import Phases._
+import Decorators._
+
+/** A transformer that eliminates SeqLiteral's, transforming `SeqLiteral(elems)` to an operation
+ * equivalent to
+ *
+ * JavaSeqLiteral(elems).toSeq
+ *
+ * Instead of `toSeq`, which takes an implicit, the appropriate "wrapArray" method
+ * is called directly. The reason for this step is that JavaSeqLiterals, being arrays
+ * keep a precise type after erasure, whereas SeqLiterals only get the erased type `Seq`,
+ */
+class SeqLiterals extends MiniPhaseTransform {
+  import ast.tpd._
+
+  override def phaseName = "seqLiterals"
+  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[PatternMatcher])
+
+  // After this phase, only JavaSeqLiterals may remain.
+  override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match {
+    case tpd: SeqLiteral => assert(tpd.isInstanceOf[JavaSeqLiteral])
+    case _ =>
+  }
+
+  /** Rewrite a SeqLiteral to `Predef.wrap...Array(JavaSeqLiteral(elems))`,
+   *  picking the wrap method from the element type (primitive / reference / generic).
+   */
+  override def transformSeqLiteral(tree: SeqLiteral)(implicit ctx: Context, info: TransformerInfo): Tree = tree match {
+    case tree: JavaSeqLiteral => tree
+    case _ =>
+      val arr = JavaSeqLiteral(tree.elems, tree.elemtpt)
+      //println(i"trans seq $tree, arr = $arr: ${arr.tpe} ${arr.tpe.elemType}")
+      val elemtp = tree.elemtpt.tpe
+      val elemCls = elemtp.classSymbol
+      val (wrapMethStr, targs) =
+        if (elemCls.isPrimitiveValueClass) (s"wrap${elemCls.name}Array", Nil)
+        else if (elemtp derivesFrom defn.ObjectClass) ("wrapRefArray", elemtp :: Nil)
+        else ("genericWrapArray", elemtp :: Nil)
+      ref(defn.ScalaPredefModule)
+        .select(wrapMethStr.toTermName)
+        .appliedToTypes(targs)
+        .appliedTo(arr)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/Splitter.scala b/compiler/src/dotty/tools/dotc/transform/Splitter.scala
new file mode 100644
index 000000000..d62be1a82
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Splitter.scala
@@ -0,0 +1,121 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import ast.Trees._
+import core._
+import Contexts._, Types._, Decorators._, Denotations._, Symbols._, SymDenotations._, Names._
+
+/** Distribute applications into Block and If nodes
+ */
+class Splitter extends MiniPhaseTransform { thisTransform =>
+  import ast.tpd._
+
+  override def phaseName: String = "splitter"
+
+  /** Distribute arguments among split branches: if the function part of an
+   *  application is a Block or If, push the application into each branch.
+   */
+  def distribute(tree: GenericApply[Type], rebuild: (Tree, List[Tree]) => Context => Tree)(implicit ctx: Context) = {
+    def recur(fn: Tree): Tree = fn match {
+      case Block(stats, expr) => Block(stats, recur(expr))
+      case If(cond, thenp, elsep) => If(cond, recur(thenp), recur(elsep))
+      case _ => rebuild(fn, tree.args)(ctx) withPos tree.pos
+    }
+    recur(tree.fun)
+  }
+
+  override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo) =
+    distribute(tree, typeApply)
+
+  override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) =
+    distribute(tree, apply)
+
+  // Rebuild callbacks passed to `distribute` for the two application forms.
+  private val typeApply = (fn: Tree, args: List[Tree]) => (ctx: Context) => TypeApply(fn, args)(ctx)
+  private val apply = (fn: Tree, args: List[Tree]) => (ctx: Context) => Apply(fn, args)(ctx)
+
+/* The following is no longer necessary, since we select members on the join of an or type:
+ *
+ /** If we select a name, make sure the node has a symbol.
+  * If necessary, split the qualifier with type tests.
+  * Example: Assume:
+  *
+  * class A { def f(x: S): T }
+  * class B { def f(x: S): T }
+  * def p(): A | B
+  *
+  * Then p().f(a) translates to
+  *
+  * val ev$1 = p()
+  * if (ev$1.isInstanceOf[A]) ev$1.asInstanceOf[A].f(a)
+  * else ev$1.asInstanceOf[B].f(a)
+  */
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) = {
+   val Select(qual, name) = tree
+
+   def memberDenot(tp: Type): SingleDenotation = {
+     val mbr = tp.member(name)
+     if (!mbr.isOverloaded) mbr.asSingleDenotation
+     else tree.tpe match {
+       case tref: TermRefWithSignature => mbr.atSignature(tref.sig).checkUnique
+       case _ =>
+         def alts = mbr.alternatives.map(alt => i"$alt: ${alt.info}").mkString(", ")
+         ctx.error(s"cannot disambiguate overloaded members $alts", tree.pos)
+         NoDenotation
+     }
+   }
+
+   def candidates(tp: Type): List[Symbol] = {
+     val mbr = memberDenot(tp)
+     if (mbr.symbol.exists) mbr.symbol :: Nil
+     else tp.widen match {
+       case tref: TypeRef =>
+         tref.info match {
+           case TypeBounds(_, hi) => candidates(hi)
+           case _ => Nil
+         }
+       case OrType(tp1, tp2) =>
+         candidates(tp1) | candidates(tp2)
+       case AndType(tp1, tp2) =>
+         candidates(tp1) & candidates(tp2)
+       case tpw =>
+         Nil
+     }
+   }
+
+   def isStructuralSelect(tp: Type): Boolean = tp.stripTypeVar match {
+     case tp: RefinedType => tp.refinedName == name || isStructuralSelect(tp.parent)
+     case tp: TypeProxy => isStructuralSelect(tp.underlying)
+     case AndType(tp1, tp2) => isStructuralSelect(tp1) || isStructuralSelect(tp2)
+     case _ => false
+   }
+
+   if (tree.symbol.exists) tree
+   else {
+     def choose(qual: Tree, syms: List[Symbol]): Tree = {
+       def testOrCast(which: Symbol, mbr: Symbol) =
+         qual.select(which).appliedToType(mbr.owner.typeRef)
+       def select(sym: Symbol) = {
+         val qual1 =
+           if (qual.tpe derivesFrom sym.owner) qual
+           else testOrCast(defn.Any_asInstanceOf, sym)
+         qual1.select(sym).withPos(tree.pos)
+       }
+       syms match {
+         case Nil =>
+           def msg =
+             if (isStructuralSelect(qual.tpe))
+               s"cannot access member '$name' from structural type ${qual.tpe.widen.show}; use Dynamic instead"
+             else
+               s"no candidate symbols for ${tree.tpe.show} found in ${qual.tpe.show}"
+           ctx.error(msg, tree.pos)
+           tree
+         case sym :: Nil =>
+           select(sym)
+         case sym :: syms1 =>
+           If(testOrCast(defn.Any_isInstanceOf, sym), select(sym), choose(qual, syms1))
+       }
+     }
+     evalOnce(qual)(qual => choose(qual, candidates(qual.tpe)))
+   }
+ }
+*/
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
new file mode 100644
index 000000000..fea478c9b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
@@ -0,0 +1,424 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransform, TreeTransformer}
+import dotty.tools.dotc.ast.{Trees, tpd}
+import scala.collection.{ mutable, immutable }
+import ValueClasses._
+import scala.annotation.tailrec
+import core._
+import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._
+import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
+import util.Positions._
+import Decorators._
+import Symbols._, TypeUtils._
+
+/** This class performs the following functions:
+ *
+ * (1) Adds super accessors for all super calls that either
+ * appear in a trait or have as a target a member of some outer class.
+ *
+ * (2) Adds protected accessors if the access to the protected member happens
+ * in a class which is not a subclass of the member's owner.
+ *
+ * It also checks that:
+ *
+ * (1) Symbols accessed from super are not abstract, or are overridden by
+ * an abstract override.
+ *
+ * (2) If a symbol accessed from super is defined in a real class (not a trait),
+ * there are no abstract members which override this member in Java's rules
+ * (see SI-4989; such an access would lead to illegal bytecode)
+ *
+ * (3) Super calls do not go to some synthetic members of Any (see isDisallowed)
+ *
+ * (4) Super calls do not go to synthetic field accessors
+ */
+class SuperAccessors(thisTransformer: DenotTransformer) {
+
+ import tpd._
+
+
+ /** Some parts of trees will get a new owner in subsequent phases.
+ * These are value class methods, which will become extension methods.
+ * (By-name arguments used to be included also, but these
+ * don't get a new class anymore, they are just wrapped in a new method).
+ *
+ * These regions will have to be treated specially for the purpose
+ * of adding accessors. For instance, super calls from these regions
+ * always have to go through an accessor.
+ *
+ * The `invalidEnclClass` field, if different from NoSymbol,
+ * contains the symbol that is not a valid owner.
+ */
+ private var invalidEnclClass: Symbol = NoSymbol
+
+ private def withInvalidCurrentClass[A](trans: => A)(implicit ctx: Context): A = {
+ val saved = invalidEnclClass
+ invalidEnclClass = ctx.owner
+ try trans
+ finally invalidEnclClass = saved
+ }
+
+ private def validCurrentClass(implicit ctx: Context): Boolean =
+ ctx.owner.enclosingClass != invalidEnclClass
+
+ /** List buffers for new accessor definitions, indexed by class */
+ private val accDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
+
+ /** A super accessor call corresponding to `sel` */
+ private def superAccessorCall(sel: Select)(implicit ctx: Context) = {
+ val Select(qual, name) = sel
+ val sym = sel.symbol
+ val clazz = qual.symbol.asClass
+ var supername = name.superName
+ if (clazz is Trait) supername = supername.expandedName(clazz)
+
+ val superAcc = clazz.info.decl(supername).suchThat(_.signature == sym.signature).symbol orElse {
+ ctx.debuglog(s"add super acc ${sym.showLocated} to $clazz")
+ val deferredOrPrivate = if (clazz is Trait) Deferred | ExpandedName else Private
+ val acc = ctx.newSymbol(
+ clazz, supername, SuperAccessor | Artifact | Method | deferredOrPrivate,
+ sel.tpe.widenSingleton.ensureMethodic, coord = sym.coord).enteredAfter(thisTransformer)
+ // Diagnostic for SI-7091
+ if (!accDefs.contains(clazz))
+ ctx.error(s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz is Package}. Accessor required for ${sel} (${sel.show})", sel.pos)
+ else accDefs(clazz) += DefDef(acc, EmptyTree)
+ acc
+ }
+
+ This(clazz).select(superAcc).withPos(sel.pos)
+ }
+
+ /** Check selection `super.f` for conforming to rules. If necessary,
+ * replace by a super accessor call.
+ */
+ private def transformSuperSelect(sel: Select)(implicit ctx: Context): Tree = {
+ val Select(sup @ Super(_, mix), name) = sel
+ val sym = sel.symbol
+ assert(sup.symbol.exists, s"missing symbol in $sel: ${sup.tpe}")
+ val clazz = sup.symbol.asClass
+
+ if (sym.isTerm && !sym.is(Method, butNot = Accessor) && !ctx.owner.is(ParamForwarder))
+      // ParamForwarders as installed by ParamForwarding.scala do use super calls to vals
+      ctx.error(s"super may not be used on ${sym.underlyingSymbol}", sel.pos)
+ else if (isDisallowed(sym))
+ ctx.error(s"super not allowed here: use this.${sel.name.decode} instead", sel.pos)
+ else if (sym is Deferred) {
+ val member = sym.overridingSymbol(clazz)
+ if (!mix.name.isEmpty ||
+ !member.exists ||
+ !((member is AbsOverride) && member.isIncompleteIn(clazz)))
+ ctx.error(
+ i"${sym.showLocated} is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'",
+ sel.pos)
+ else ctx.log(i"ok super $sel ${sym.showLocated} $member $clazz ${member.isIncompleteIn(clazz)}")
+ }
+ else if (mix.name.isEmpty && !(sym.owner is Trait))
+ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
+ for (intermediateClass <- clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)) {
+ val overriding = sym.overridingSymbol(intermediateClass)
+ if ((overriding is (Deferred, butNot = AbsOverride)) && !(overriding.owner is Trait))
+ ctx.error(
+ s"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract",
+ sel.pos)
+
+ }
+ if (name.isTermName && mix.name.isEmpty &&
+ ((clazz is Trait) || clazz != ctx.owner.enclosingClass || !validCurrentClass))
+ superAccessorCall(sel)(ctx.withPhase(thisTransformer.next))
+ else sel
+ }
+
+ /** Disallow some super.XX calls targeting Any methods which would
+ * otherwise lead to either a compiler crash or runtime failure.
+ */
+ private def isDisallowed(sym: Symbol)(implicit ctx: Context) = {
+ val d = defn
+ import d._
+ (sym eq Any_isInstanceOf) ||
+ (sym eq Any_asInstanceOf) ||
+ (sym eq Any_==) ||
+ (sym eq Any_!=) ||
+ (sym eq Any_##)
+ }
+
+ /** Replace `sel` (or `sel[targs]` if `targs` is nonempty) with a protected accessor
+ * call, if necessary.
+ */
+ private def ensureProtectedAccessOK(sel: Select, targs: List[Tree])(implicit ctx: Context) = {
+ val sym = sel.symbol
+ if (sym.isTerm && !sel.name.isOuterSelect && needsProtectedAccessor(sym, sel.pos)) {
+ ctx.debuglog("Adding protected accessor for " + sel)
+ protectedAccessorCall(sel, targs)
+ } else sel
+ }
+
+ /** Add a protected accessor, if needed, and return a tree that calls
+ * the accessor and returns the same member. The result is already
+ * typed.
+ */
+ private def protectedAccessorCall(sel: Select, targs: List[Tree])(implicit ctx: Context): Tree = {
+ val Select(qual, _) = sel
+ val sym = sel.symbol.asTerm
+ val clazz = hostForAccessorOf(sym, currentClass)
+ assert(clazz.exists, sym)
+ ctx.debuglog("Decided for host class: " + clazz)
+
+ val accName = sym.name.protectedAccessorName
+
+ // if the result type depends on the this type of an enclosing class, the accessor
+ // has to take an object of exactly this type, otherwise it's more general
+ val receiverType =
+ if (isThisType(sym.info.finalResultType)) clazz.thisType
+ else clazz.classInfo.selfType
+ val accType = {
+ def accTypeOf(tpe: Type): Type = tpe match {
+ case tpe: PolyType =>
+ tpe.derivedPolyType(tpe.paramNames, tpe.paramBounds, accTypeOf(tpe.resultType))
+ case _ =>
+ MethodType(receiverType :: Nil)(mt => tpe.substThis(sym.owner.asClass, MethodParam(mt, 0)))
+ }
+ accTypeOf(sym.info)
+ }
+ val protectedAccessor = clazz.info.decl(accName).suchThat(_.signature == accType.signature).symbol orElse {
+ val newAcc = ctx.newSymbol(
+ clazz, accName, Artifact, accType, coord = sel.pos).enteredAfter(thisTransformer)
+ val code = polyDefDef(newAcc, trefs => vrefss => {
+ val (receiver :: _) :: tail = vrefss
+ val base = receiver.select(sym).appliedToTypes(trefs)
+ (base /: vrefss)(Apply(_, _))
+ })
+ ctx.debuglog("created protected accessor: " + code)
+ accDefs(clazz) += code
+ newAcc
+ }
+ val res = This(clazz)
+ .select(protectedAccessor)
+ .appliedToTypeTrees(targs)
+ .appliedTo(qual)
+ .withPos(sel.pos)
+ ctx.debuglog(s"Replaced $sel with $res")
+ res
+ }
+
+ def isProtectedAccessor(tree: Tree)(implicit ctx: Context): Boolean = tree match {
+ case Apply(TypeApply(Select(_, name), _), qual :: Nil) => name.isProtectedAccessorName
+ case _ => false
+ }
+
+ /** Add a protected accessor, if needed, and return a tree that calls
+ * the accessor and returns the same member. The result is already
+ * typed.
+ */
+ private def protectedAccessor(tree: Select, targs: List[Tree])(implicit ctx: Context): Tree = {
+ val Select(qual, _) = tree
+ val sym = tree.symbol.asTerm
+ val clazz = hostForAccessorOf(sym, currentClass)
+ assert(clazz.exists, sym)
+ ctx.debuglog("Decided for host class: " + clazz)
+
+ val accName = sym.name.protectedAccessorName
+
+ // if the result type depends on the this type of an enclosing class, the accessor
+ // has to take an object of exactly this type, otherwise it's more general
+ val receiverType =
+ if (isThisType(sym.info.finalResultType)) clazz.thisType
+ else clazz.classInfo.selfType
+ def accTypeOf(tpe: Type): Type = tpe match {
+ case tpe: PolyType =>
+ tpe.derivedPolyType(tpe.paramNames, tpe.paramBounds, accTypeOf(tpe.resultType))
+ case _ =>
+ MethodType(receiverType :: Nil)(mt => tpe.substThis(sym.owner.asClass, MethodParam(mt, 0)))
+ }
+ val accType = accTypeOf(sym.info)
+ val protectedAccessor = clazz.info.decl(accName).suchThat(_.signature == accType.signature).symbol orElse {
+ val newAcc = ctx.newSymbol(
+ clazz, accName, Artifact, accType, coord = tree.pos).enteredAfter(thisTransformer)
+ val code = polyDefDef(newAcc, trefs => vrefss => {
+ val (receiver :: _) :: tail = vrefss
+ val base = receiver.select(sym).appliedToTypes(trefs)
+ (base /: vrefss)(Apply(_, _))
+ })
+ ctx.debuglog("created protected accessor: " + code)
+ accDefs(clazz) += code
+ newAcc
+ }
+ val res = This(clazz)
+ .select(protectedAccessor)
+ .appliedToTypeTrees(targs)
+ .appliedTo(qual)
+ .withPos(tree.pos)
+ ctx.debuglog(s"Replaced $tree with $res")
+ res
+ }
+
+ /** Add an accessor for field, if needed, and return a selection tree for it .
+ * The result is not typed.
+ */
+ private def protectedSetter(tree: Select)(implicit ctx: Context): Tree = {
+ val field = tree.symbol.asTerm
+ val clazz = hostForAccessorOf(field, currentClass)
+ assert(clazz.exists, field)
+ ctx.debuglog("Decided for host class: " + clazz)
+
+ val accName = field.name.protectedSetterName
+ val accType = MethodType(clazz.classInfo.selfType :: field.info :: Nil, defn.UnitType)
+ val protectedAccessor = clazz.info.decl(accName).symbol orElse {
+ val newAcc = ctx.newSymbol(
+ clazz, accName, Artifact, accType, coord = tree.pos).enteredAfter(thisTransformer)
+ val code = DefDef(newAcc, vrefss => {
+ val (receiver :: value :: Nil) :: Nil = vrefss
+ Assign(receiver.select(field), value).withPos(tree.pos)
+ })
+ ctx.debuglog("created protected setter: " + code)
+ accDefs(clazz) += code
+ newAcc
+ }
+ This(clazz).select(protectedAccessor).withPos(tree.pos)
+ }
+
+ /** Does `sym` need an accessor when accessed from `currentClass`?
+ * A special case arises for classes with explicit self-types. If the
+ * self type is a Java class, and a protected accessor is needed, we issue
+ * an error. If the self type is a Scala class, we don't add an accessor.
+ * An accessor is not needed if the access boundary is larger than the
+   *  enclosing package, since that translates to 'public' on the host system
+ * (as Java has no real package nesting).
+ *
+ * If the access happens inside a 'trait', access is more problematic since
+ * the implementation code is moved to an '$class' class which does not
+ * inherit anything. Since we can't (yet) add accessors for 'required'
+ * classes, this has to be signaled as error.
+ * FIXME Need to better understand this logic
+ */
+ private def needsProtectedAccessor(sym: Symbol, pos: Position)(implicit ctx: Context): Boolean = {
+ val clazz = currentClass
+ val host = hostForAccessorOf(sym, clazz)
+ val selfType = host.classInfo.selfType
+ def accessibleThroughSubclassing =
+ validCurrentClass && (selfType <:< sym.owner.typeRef) && !clazz.is(Trait)
+
+ val isCandidate = (
+ sym.is(Protected)
+ && sym.is(JavaDefined)
+ && !sym.effectiveOwner.is(Package)
+ && !accessibleThroughSubclassing
+ && (sym.enclosingPackageClass != currentClass.enclosingPackageClass)
+ && (sym.enclosingPackageClass == sym.accessBoundary(sym.enclosingPackageClass))
+ )
+ def isSelfType = !(host.typeRef <:< selfType) && {
+ if (selfType.typeSymbol.is(JavaDefined))
+        ctx.restrictionError(s"cannot access protected $sym from within $clazz with self type $selfType", pos)
+ true
+ }
+ def isJavaProtected = host.is(Trait) && sym.is(JavaDefined) && {
+ ctx.restrictionError(
+ s"""$clazz accesses protected $sym inside a concrete trait method.
+ |Add an accessor in a class extending ${sym.enclosingClass} as a workaround.""".stripMargin,
+ pos
+ )
+ true
+ }
+ isCandidate && !host.is(Package) && !isSelfType && !isJavaProtected
+ }
+
+ /** Return the innermost enclosing class C of referencingClass for which either
+ * of the following holds:
+ * - C is a subclass of sym.owner or
+ * - C is declared in the same package as sym's owner
+ */
+ private def hostForAccessorOf(sym: Symbol, referencingClass: ClassSymbol)(implicit ctx: Context): ClassSymbol =
+ if (referencingClass.derivesFrom(sym.owner)
+ || referencingClass.classInfo.selfType <:< sym.owner.typeRef
+ || referencingClass.enclosingPackageClass == sym.owner.enclosingPackageClass) {
+ assert(referencingClass.isClass, referencingClass)
+ referencingClass
+ }
+ else if (referencingClass.owner.enclosingClass.exists)
+ hostForAccessorOf(sym, referencingClass.owner.enclosingClass.asClass)
+ else
+ referencingClass
+
+ /** Is 'tpe' a ThisType, or a type proxy with a ThisType as transitively underlying type? */
+ private def isThisType(tpe: Type)(implicit ctx: Context): Boolean = tpe match {
+ case tpe: ThisType => !tpe.cls.is(PackageClass)
+ case tpe: TypeProxy => isThisType(tpe.underlying)
+ case _ => false
+ }
+
+ /** Transform select node, adding super and protected accessors as needed */
+ def transformSelect(tree: Tree, targs: List[Tree])(implicit ctx: Context) = {
+ val sel @ Select(qual, name) = tree
+ val sym = sel.symbol
+ qual match {
+ case _: This =>
+ /*
+ * A trait which extends a class and accesses a protected member
+ * of that class cannot implement the necessary accessor method
+ * because its implementation is in an implementation class (e.g.
+ * Foo$class) which inherits nothing, and jvm access restrictions
+ * require the call site to be in an actual subclass. So non-trait
+ * classes inspect their ancestors for any such situations and
+ * generate the accessors. See SI-2296.
+ */
+ // FIXME (from scalac's SuperAccessors)
+ // - this should be unified with needsProtectedAccessor, but some
+ // subtlety which presently eludes me is foiling my attempts.
+ val shouldEnsureAccessor = (
+ (currentClass is Trait)
+ && (sym is Protected)
+ && sym.enclosingClass != currentClass
+ && !(sym.owner is PackageClass) // SI-7091 no accessor needed package owned (ie, top level) symbols
+ && !(sym.owner is Trait)
+ && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass
+ && qual.symbol.info.member(sym.name).exists
+ && !needsProtectedAccessor(sym, sel.pos))
+ if (shouldEnsureAccessor) {
+ ctx.log("Ensuring accessor for call to protected " + sym.showLocated + " from " + currentClass)
+ superAccessorCall(sel)
+ } else
+ ensureProtectedAccessOK(sel, targs)
+
+ case Super(_, mix) =>
+ transformSuperSelect(sel)
+
+ case _ =>
+ ensureProtectedAccessOK(sel, targs)
+ }
+ }
+
+ /** Transform assignment, adding a protected setter if needed */
+ def transformAssign(tree: Tree)(implicit ctx: Context) = {
+ val Assign(lhs @ Select(qual, name), rhs) = tree
+ if ((lhs.symbol is Mutable) &&
+ (lhs.symbol is JavaDefined) &&
+ needsProtectedAccessor(lhs.symbol, tree.pos)) {
+ ctx.debuglog("Adding protected setter for " + tree)
+ val setter = protectedSetter(lhs)
+ ctx.debuglog("Replaced " + tree + " with " + setter)
+ setter.appliedTo(qual, rhs)
+ }
+ else tree
+ }
+
+ /** Wrap template to template transform `op` with needed initialization and finalization */
+ def wrapTemplate(tree: Template)(op: Template => Template)(implicit ctx: Context) = {
+ accDefs(currentClass) = new mutable.ListBuffer[Tree]
+ val impl = op(tree)
+ val accessors = accDefs.remove(currentClass).get
+ if (accessors.isEmpty) impl
+ else {
+ val (params, rest) = impl.body span {
+ case td: TypeDef => !td.isClassDef
+ case vd: ValOrDefDef => vd.symbol.flags is ParamAccessor
+ case _ => false
+ }
+ cpy.Template(impl)(body = params ++ accessors ++ rest)
+ }
+ }
+
+ /** Wrap `DefDef` producing operation `op`, potentially setting `invalidClass` info */
+ def wrapDefDef(ddef: DefDef)(op: => DefDef)(implicit ctx: Context) =
+ if (isMethodWithExtension(ddef.symbol)) withInvalidCurrentClass(op) else op
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
new file mode 100644
index 000000000..05305575e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
@@ -0,0 +1,117 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Types._
+import Contexts._
+import Symbols._
+import SymDenotations._
+import Decorators._
+import Names._
+import StdNames._
+import NameOps._
+import Flags._
+import Annotations._
+import language.implicitConversions
+
+object SymUtils {
+ implicit def decorateSymbol(sym: Symbol): SymUtils = new SymUtils(sym)
+ implicit def decorateSymDenot(d: SymDenotation): SymUtils = new SymUtils(d.symbol)
+}
+
+/** A decorator that provides methods on symbols
+ * that are needed in the transformer pipeline.
+ */
+class SymUtils(val self: Symbol) extends AnyVal {
+ import SymUtils._
+
+ /** All traits implemented by a class or trait except for those inherited through the superclass. */
+ def directlyInheritedTraits(implicit ctx: Context) = {
+ val superCls = self.asClass.superClass
+ val baseClasses = self.asClass.baseClasses
+ if (baseClasses.isEmpty) Nil
+ else baseClasses.tail.takeWhile(_ ne superCls).reverse
+ }
+
+ /** All traits implemented by a class, except for those inherited through the superclass.
+ * The empty list if `self` is a trait.
+ */
+ def mixins(implicit ctx: Context) = {
+ if (self is Trait) Nil
+ else directlyInheritedTraits
+ }
+
+ def isTypeTestOrCast(implicit ctx: Context): Boolean =
+ self == defn.Any_asInstanceOf || self == defn.Any_isInstanceOf
+
+ def isVolatile(implicit ctx: Context) = self.hasAnnotation(defn.VolatileAnnot)
+
+ def isAnyOverride(implicit ctx: Context) = self.is(Override) || self.is(AbsOverride)
+ // careful: AbsOverride is a term only flag. combining with Override would catch only terms.
+
+ /** If this is a constructor, its owner: otherwise this. */
+ final def skipConstructor(implicit ctx: Context): Symbol =
+ if (self.isConstructor) self.owner else self
+
+ /** The closest properly enclosing method or class of this symbol. */
+ final def enclosure(implicit ctx: Context) = {
+ self.owner.enclosingMethodOrClass
+ }
+
+ /** The closest enclosing method or class of this symbol */
+ final def enclosingMethodOrClass(implicit ctx: Context): Symbol =
+ if (self.is(Method, butNot = Label) || self.isClass) self
+ else if (self.exists) self.owner.enclosingMethodOrClass
+ else NoSymbol
+
+ /** Apply symbol/symbol substitution to this symbol */
+ def subst(from: List[Symbol], to: List[Symbol]): Symbol = {
+ def loop(from: List[Symbol], to: List[Symbol]): Symbol =
+ if (from.isEmpty) self
+ else if (self eq from.head) to.head
+ else loop(from.tail, to.tail)
+ loop(from, to)
+ }
+
+ def accessorNamed(name: TermName)(implicit ctx: Context): Symbol =
+ self.owner.info.decl(name).suchThat(_ is Accessor).symbol
+
+ def termParamAccessors(implicit ctx: Context): List[Symbol] =
+ self.info.decls.filter(_ is TermParamAccessor).toList
+
+ def caseAccessors(implicit ctx:Context) =
+ self.info.decls.filter(_ is CaseAccessor).toList
+
+ def getter(implicit ctx: Context): Symbol =
+ if (self.isGetter) self else accessorNamed(self.asTerm.name.getterName)
+
+ def setter(implicit ctx: Context): Symbol =
+ if (self.isSetter) self
+ else accessorNamed(self.asTerm.name.setterName)
+
+ def field(implicit ctx: Context): Symbol =
+ self.owner.info.decl(self.asTerm.name.fieldName).suchThat(!_.is(Method)).symbol
+
+ def isField(implicit ctx: Context): Boolean =
+ self.isTerm && !self.is(Method)
+
+ def implClass(implicit ctx: Context): Symbol =
+ self.owner.info.decl(self.name.implClassName).symbol
+
+ def annotationsCarrying(meta: ClassSymbol)(implicit ctx: Context): List[Annotation] =
+ self.annotations.filter(_.symbol.hasAnnotation(meta))
+
+ def withAnnotationsCarrying(from: Symbol, meta: ClassSymbol)(implicit ctx: Context): self.type = {
+ self.addAnnotations(from.annotationsCarrying(meta))
+ self
+ }
+
+ def registerCompanionMethod(name: Name, target: Symbol)(implicit ctx: Context) = {
+ if (!self.unforcedDecls.lookup(name).exists) {
+ val companionMethod = ctx.synthesizeCompanionMethod(name, target, self)
+ if (companionMethod.exists) {
+ companionMethod.entered
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMethods.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMethods.scala
new file mode 100644
index 000000000..9dfd92fe9
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMethods.scala
@@ -0,0 +1,198 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Symbols._, Types._, Contexts._, Names._, StdNames._, Constants._, SymUtils._
+import scala.collection.{ mutable, immutable }
+import Flags._
+import TreeTransforms._
+import DenotTransformers._
+import ast.Trees._
+import ast.untpd
+import Decorators._
+import NameOps._
+import ValueClasses.isDerivedValueClass
+import scala.collection.mutable.ListBuffer
+import scala.language.postfixOps
+
+/** Synthetic method implementations for case classes, case objects,
+ * and value classes.
+ * Selectively added to case classes/objects, unless a non-default
+ * implementation already exists:
+ * def equals(other: Any): Boolean
+ * def hashCode(): Int
+ * def canEqual(other: Any): Boolean
+ * def toString(): String
+ * def productArity: Int
+ * def productPrefix: String
+ * Special handling:
+ * protected def readResolve(): AnyRef
+ *
+ * Selectively added to value classes, unless a non-default
+ * implementation already exists:
+ *
+ * def equals(other: Any): Boolean
+ * def hashCode(): Int
+ */
+class SyntheticMethods(thisTransformer: DenotTransformer) {
+ import ast.tpd._
+
+ private var myValueSymbols: List[Symbol] = Nil
+ private var myCaseSymbols: List[Symbol] = Nil
+
+ private def initSymbols(implicit ctx: Context) =
+ if (myValueSymbols.isEmpty) {
+ myValueSymbols = List(defn.Any_hashCode, defn.Any_equals)
+ myCaseSymbols = myValueSymbols ++ List(defn.Any_toString, defn.Product_canEqual,
+ defn.Product_productArity, defn.Product_productPrefix)
+ }
+
+ def valueSymbols(implicit ctx: Context) = { initSymbols; myValueSymbols }
+ def caseSymbols(implicit ctx: Context) = { initSymbols; myCaseSymbols }
+
+ /** The synthetic methods of the case or value class `clazz`.
+ */
+ def syntheticMethods(clazz: ClassSymbol)(implicit ctx: Context): List[Tree] = {
+ val clazzType = clazz.typeRef
+ lazy val accessors =
+ if (isDerivedValueClass(clazz))
+ clazz.termParamAccessors
+ else
+ clazz.caseAccessors
+
+ val symbolsToSynthesize: List[Symbol] =
+ if (clazz.is(Case)) caseSymbols
+ else if (isDerivedValueClass(clazz)) valueSymbols
+ else Nil
+
+ def syntheticDefIfMissing(sym: Symbol): List[Tree] = {
+ val existing = sym.matchingMember(clazz.thisType)
+ if (existing == sym || existing.is(Deferred)) syntheticDef(sym) :: Nil
+ else Nil
+ }
+
+ def syntheticDef(sym: Symbol): Tree = {
+ val synthetic = sym.copy(
+ owner = clazz,
+ flags = sym.flags &~ Deferred | Synthetic | Override,
+ coord = clazz.coord).enteredAfter(thisTransformer).asTerm
+
+ def forwardToRuntime(vrefss: List[List[Tree]]): Tree =
+ ref(defn.runtimeMethodRef("_" + sym.name.toString)).appliedToArgs(This(clazz) :: vrefss.head)
+
+ def ownName(vrefss: List[List[Tree]]): Tree =
+ Literal(Constant(clazz.name.stripModuleClassSuffix.decode.toString))
+
+ def syntheticRHS(implicit ctx: Context): List[List[Tree]] => Tree = synthetic.name match {
+ case nme.hashCode_ if isDerivedValueClass(clazz) => vrefss => valueHashCodeBody
+ case nme.hashCode_ => vrefss => caseHashCodeBody
+ case nme.toString_ => if (clazz.is(ModuleClass)) ownName else forwardToRuntime
+ case nme.equals_ => vrefss => equalsBody(vrefss.head.head)
+ case nme.canEqual_ => vrefss => canEqualBody(vrefss.head.head)
+ case nme.productArity => vrefss => Literal(Constant(accessors.length))
+ case nme.productPrefix => ownName
+ }
+ ctx.log(s"adding $synthetic to $clazz at ${ctx.phase}")
+ DefDef(synthetic, syntheticRHS(ctx.withOwner(synthetic)))
+ }
+
+ /** The class
+ *
+ * case class C(x: T, y: U)
+ *
+ * gets the equals method:
+ *
+ * def equals(that: Any): Boolean =
+ * (this eq that) || {
+ * that match {
+     *     case x$0 @ (_: C) => this.x == x$0.x && this.y == x$0.y
+ * case _ => false
+ * }
+ *
+ * If C is a value class the initial `eq` test is omitted.
+ */
+ def equalsBody(that: Tree)(implicit ctx: Context): Tree = {
+ val thatAsClazz = ctx.newSymbol(ctx.owner, nme.x_0, Synthetic, clazzType, coord = ctx.owner.pos) // x$0
+ def wildcardAscription(tp: Type) = Typed(Underscore(tp), TypeTree(tp))
+ val pattern = Bind(thatAsClazz, wildcardAscription(clazzType)) // x$0 @ (_: C)
+ val comparisons = accessors map (accessor =>
+ This(clazz).select(accessor).select(defn.Any_==).appliedTo(ref(thatAsClazz).select(accessor)))
+      val rhs = // this.x == x$0.x && this.y == x$0.y
+ if (comparisons.isEmpty) Literal(Constant(true)) else comparisons.reduceLeft(_ and _)
+      val matchingCase = CaseDef(pattern, EmptyTree, rhs) // case x$0 @ (_: C) => this.x == x$0.x && this.y == x$0.y
+ val defaultCase = CaseDef(wildcardAscription(defn.AnyType), EmptyTree, Literal(Constant(false))) // case _ => false
+ val matchExpr = Match(that, List(matchingCase, defaultCase))
+ if (isDerivedValueClass(clazz)) matchExpr
+ else {
+ val eqCompare = This(clazz).select(defn.Object_eq).appliedTo(that.asInstance(defn.ObjectType))
+ eqCompare or matchExpr
+ }
+ }
+
+ /** The class
+ *
+ * class C(x: T) extends AnyVal
+ *
+ * gets the hashCode method:
+ *
+ * def hashCode: Int = x.hashCode()
+ */
+ def valueHashCodeBody(implicit ctx: Context): Tree = {
+ assert(accessors.length == 1)
+ ref(accessors.head).select(nme.hashCode_).ensureApplied
+ }
+
+ /** The class
+ *
+ * case class C(x: T, y: T)
+ *
+ * gets the hashCode method:
+ *
+ * def hashCode: Int = {
+ * <synthetic> var acc: Int = 0xcafebabe;
+ * acc = Statics.mix(acc, x);
+ * acc = Statics.mix(acc, Statics.this.anyHash(y));
+ * Statics.finalizeHash(acc, 2)
+ * }
+ */
+ def caseHashCodeBody(implicit ctx: Context): Tree = {
+ val acc = ctx.newSymbol(ctx.owner, "acc".toTermName, Mutable | Synthetic, defn.IntType, coord = ctx.owner.pos)
+ val accDef = ValDef(acc, Literal(Constant(0xcafebabe)))
+ val mixes = for (accessor <- accessors.toList) yield
+ Assign(ref(acc), ref(defn.staticsMethod("mix")).appliedTo(ref(acc), hashImpl(accessor)))
+ val finish = ref(defn.staticsMethod("finalizeHash")).appliedTo(ref(acc), Literal(Constant(accessors.size)))
+ Block(accDef :: mixes, finish)
+ }
+
+ /** The hashCode implementation for given symbol `sym`. */
+ def hashImpl(sym: Symbol)(implicit ctx: Context): Tree =
+ defn.scalaClassName(sym.info.finalResultType) match {
+ case tpnme.Unit | tpnme.Null => Literal(Constant(0))
+ case tpnme.Boolean => If(ref(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+ case tpnme.Int => ref(sym)
+ case tpnme.Short | tpnme.Byte | tpnme.Char => ref(sym).select(nme.toInt)
+ case tpnme.Long => ref(defn.staticsMethod("longHash")).appliedTo(ref(sym))
+ case tpnme.Double => ref(defn.staticsMethod("doubleHash")).appliedTo(ref(sym))
+ case tpnme.Float => ref(defn.staticsMethod("floatHash")).appliedTo(ref(sym))
+ case _ => ref(defn.staticsMethod("anyHash")).appliedTo(ref(sym))
+ }
+
+ /** The class
+ *
+ * case class C(...)
+ *
+ * gets the canEqual method
+ *
+ * def canEqual(that: Any) = that.isInstanceOf[C]
+ */
+ def canEqualBody(that: Tree): Tree = that.isInstance(clazzType)
+
+ symbolsToSynthesize flatMap syntheticDefIfMissing
+ }
+
+ def addSyntheticMethods(impl: Template)(implicit ctx: Context) =
+ if (ctx.owner.is(Case) || isDerivedValueClass(ctx.owner))
+ cpy.Template(impl)(body = impl.body ++ syntheticMethods(ctx.owner.asClass))
+ else
+ impl
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala
new file mode 100644
index 000000000..dc4454439
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala
@@ -0,0 +1,384 @@
+package dotty.tools.dotc.transform
+
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.{TreeTypeMap, tpd}
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.DenotTransformers.DenotTransformer
+import dotty.tools.dotc.core.Denotations.SingleDenotation
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core._
+import dotty.tools.dotc.transform.TailRec._
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/**
+ * A Tail Rec Transformer
+ * @author Erik Stenman, Iulian Dragos,
+ * ported and heavily modified for dotty by Dmitry Petrashko
+ * @version 1.1
+ *
+ * What it does:
+ * <p>
+ * Finds method calls in tail-position and replaces them with jumps.
+ * A call is in a tail-position if it is the last instruction to be
+ * executed in the body of a method. This is done by recursing over
+ * the trees that may contain calls in tail-position (trees that can't
+ * contain such calls are not transformed). However, they are not that
+ * many.
+ * </p>
+ * <p>
+ * Self-recursive calls in tail-position are replaced by jumps to a
+ * label at the beginning of the method. As the JVM provides no way to
+ * jump from a method to another one, non-recursive calls in
+ * tail-position are not optimized.
+ * </p>
+ * <p>
+ * A method call is self-recursive if it calls the current method and
+ * the method is final (otherwise, it could
+ * be a call to an overridden method in a subclass).
+ *
+ * Recursive calls on a different instance
+ * are optimized. Since 'this' is not a local variable, it is added as
+ * a label parameter.
+ * </p>
+ * <p>
+ * This phase has been moved before pattern matching to catch more
+ * of the common cases of tail recursive functions. This means that
+ * more cases should be taken into account (like nested function, and
+ * pattern cases).
+ * </p>
+ * <p>
+ * If a method contains self-recursive calls, a label is added at
+ * the beginning of its body and the calls are replaced by jumps to
+ * that label.
+ * </p>
+ * <p>
+ *
+ * In scalac, if the method had type parameters, the call had to pass the same
+ * parameters as type arguments. This is no longer the case in dotc.
+ * In scalac this phase is named tailCall, but since it only provides an optimization for
+ * self-recursive functions, it has been renamed to tailrec here.
+ * </p>
+ */
+class TailRec extends MiniPhaseTransform with DenotTransformer with FullParameterization { thisTransform =>
+
+ import dotty.tools.dotc.ast.tpd._
+
+ // Identity denotation transform: this phase changes trees only, not denotations.
+ override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
+
+ override def phaseName: String = "tailrec"
+ override def treeTransformPhase = thisTransform // TODO Make sure tailrec runs at next phase.
+
+ // Prefix/flags for the synthetic label methods that replace recursive calls.
+ final val labelPrefix = "tailLabel"
+ final val labelFlags = Flags.Synthetic | Flags.Label
+
+ /** Symbols of methods that have @tailrec annotations inside */
+ private val methodsWithInnerAnnots = new collection.mutable.HashSet[Symbol]()
+
+ override def transformUnit(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ // Reset per-unit state so annotations collected in one unit do not leak into the next.
+ methodsWithInnerAnnots.clear()
+ tree
+ }
+
+ // Records the enclosing method of every `expr: @tailrec` type ascription, so
+ // transformDefDef can later report an error if that method is not optimizable.
+ override def transformTyped(tree: Typed)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (tree.tpt.tpe.hasAnnotation(defn.TailrecAnnot))
+ methodsWithInnerAnnots += ctx.owner.enclosingMethod
+ tree
+ }
+
+ /** Creates the synthetic label method that tail calls to `method` will jump to.
+ * For a class-owned method the label gets a fully parameterized type (taking
+ * `this` and class type parameters explicitly); a local method's label keeps
+ * the original method type.
+ */
+ private def mkLabel(method: Symbol, abstractOverClass: Boolean)(implicit c: Context): TermSymbol = {
+ val name = c.freshName(labelPrefix)
+
+ if (method.owner.isClass)
+ c.newSymbol(method, name.toTermName, labelFlags, fullyParameterizedType(method.info, method.enclosingClass.asClass, abstractOverClass, liftThisType = false))
+ else c.newSymbol(method, name.toTermName, labelFlags, method.info)
+ }
+
+ /** Speculatively rewrites the body of an effectively-final, non-accessor,
+ * non-label method: self-recursive calls in tail position become jumps to a
+ * fresh label method. If nothing was rewritten, the original rhs is kept and
+ * an error is reported when @tailrec made the optimization mandatory.
+ */
+ override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ val sym = tree.symbol
+ tree match {
+ case dd@DefDef(name, tparams, vparamss0, tpt, _)
+ if (sym.isEffectivelyFinal) && !((sym is Flags.Accessor) || (dd.rhs eq EmptyTree) || (sym is Flags.Label)) =>
+ val mandatory = sym.hasAnnotation(defn.TailrecAnnot)
+ atGroupEnd { implicit ctx: Context =>
+
+ cpy.DefDef(dd)(rhs = {
+
+ val defIsTopLevel = sym.owner.isClass
+ val origMeth = sym
+ val label = mkLabel(sym, abstractOverClass = defIsTopLevel)
+ val owner = ctx.owner.enclosingClass.asClass
+ val thisTpe = owner.thisType.widen
+
+ var rewrote = false
+
+ // Note: this can be split in two separate transforms (in different groups),
+ // then the first one will collect info about which transformations and rewritings should be applied
+ // and the second one will actually apply them;
+ // now this speculatively transforms the tree and throws away the result in many cases
+ val rhsSemiTransformed = {
+ val transformer = new TailRecElimination(origMeth, dd.tparams, owner, thisTpe, mandatory, label, abstractOverClass = defIsTopLevel)
+ val rhs = atGroupEnd(transformer.transform(dd.rhs)(_))
+ rewrote = transformer.rewrote
+ rhs
+ }
+
+ // Only wrap the body in a label definition if at least one call was rewritten.
+ if (rewrote) {
+ val dummyDefDef = cpy.DefDef(tree)(rhs = rhsSemiTransformed)
+ if (tree.symbol.owner.isClass) {
+ // Class-level method: label abstracts over `this` and class type params.
+ val labelDef = fullyParameterizedDef(label, dummyDefDef, abstractOverClass = defIsTopLevel)
+ val call = forwarder(label, dd, abstractOverClass = defIsTopLevel, liftThisType = true)
+ Block(List(labelDef), call)
+ } else { // inner method. Tail recursion does not change `this`
+ val labelDef = polyDefDef(label, trefs => vrefss => {
+ val origMeth = tree.symbol
+ val origTParams = tree.tparams.map(_.symbol)
+ val origVParams = tree.vparamss.flatten map (_.symbol)
+ // Re-home the transformed body: substitute the original params with
+ // the label's params and change owners accordingly.
+ new TreeTypeMap(
+ typeMap = identity(_)
+ .substDealias(origTParams, trefs)
+ .subst(origVParams, vrefss.flatten.map(_.tpe)),
+ oldOwners = origMeth :: Nil,
+ newOwners = label :: Nil
+ ).transform(rhsSemiTransformed)
+ })
+ val callIntoLabel = (
+ if (dd.tparams.isEmpty) ref(label)
+ else ref(label).appliedToTypes(dd.tparams.map(_.tpe))
+ ).appliedToArgss(vparamss0.map(_.map(x=> ref(x.symbol))))
+ Block(List(labelDef), callIntoLabel)
+ }} else {
+ if (mandatory) ctx.error(
+ "TailRec optimisation not applicable, method not tail recursive",
+ // FIXME: want to report this error on `dd.namePos`, but
+ // because of extension method getting a weird pos, it is
+ // better to report on symbol so there's no overlap
+ sym.pos
+ )
+ dd.rhs
+ }
+ })
+ }
+ case d: DefDef if d.symbol.hasAnnotation(defn.TailrecAnnot) || methodsWithInnerAnnots.contains(d.symbol) =>
+ ctx.error("TailRec optimisation not applicable, method is neither private nor final so can be overridden", sym.pos)
+ d
+ case d if d.symbol.hasAnnotation(defn.TailrecAnnot) || methodsWithInnerAnnots.contains(d.symbol) =>
+ ctx.error("TailRec optimisation not applicable, not a method", sym.pos)
+ d
+ case _ => tree
+ }
+
+ }
+
+ /** Tree walker that rewrites self-recursive calls to `method` that occur in
+ * tail position into applications of `label`. Sets `rewrote` when at least
+ * one call was rewritten; reports errors when `isMandatory` (i.e. @tailrec)
+ * and a recursive call cannot be rewritten.
+ */
+ class TailRecElimination(method: Symbol, methTparams: List[Tree], enclosingClass: Symbol, thisType: Type, isMandatory: Boolean, label: Symbol, abstractOverClass: Boolean) extends tpd.TreeMap {
+
+ import dotty.tools.dotc.ast.tpd._
+
+ var rewrote = false
+
+ private val defaultReason = "it contains a recursive call not in tail position"
+
+ // Whether the subtree currently being visited is in tail position.
+ private var ctx: TailContext = yesTailContext
+
+ /** Rewrite this tree to contain no tail recursive calls */
+ def transform(tree: Tree, nctx: TailContext)(implicit c: Context): Tree = {
+ if (ctx == nctx) transform(tree)
+ else {
+ val saved = ctx
+ ctx = nctx
+ try transform(tree)
+ finally this.ctx = saved
+ }
+ }
+
+ def yesTailTransform(tree: Tree)(implicit c: Context): Tree =
+ transform(tree, yesTailContext)
+
+ def noTailTransform(tree: Tree)(implicit c: Context): Tree =
+ transform(tree, noTailContext)
+
+ def noTailTransforms[Tr <: Tree](trees: List[Tr])(implicit c: Context): List[Tr] =
+ trees.map(noTailTransform).asInstanceOf[List[Tr]]
+
+ override def transform(tree: Tree)(implicit c: Context): Tree = {
+ /* A possibly polymorphic apply to be considered for tail call transformation. */
+ def rewriteApply(tree: Tree, sym: Symbol, required: Boolean = false): Tree = {
+ // Peels TypeApply/Apply/Select layers off a call, returning
+ // (receiver, innermost call, value-arg lists, type args, called symbol).
+ def receiverArgumentsAndSymbol(t: Tree, accArgs: List[List[Tree]] = Nil, accT: List[Tree] = Nil):
+ (Tree, Tree, List[List[Tree]], List[Tree], Symbol) = t match {
+ case TypeApply(fun, targs) if fun.symbol eq t.symbol => receiverArgumentsAndSymbol(fun, accArgs, targs)
+ case Apply(fn, args) if fn.symbol == t.symbol => receiverArgumentsAndSymbol(fn, args :: accArgs, accT)
+ case Select(qual, _) => (qual, t, accArgs, accT, t.symbol)
+ case x: This => (x, x, accArgs, accT, x.symbol)
+ case x: Ident if x.symbol eq method => (EmptyTree, x, accArgs, accT, x.symbol)
+ case x => (x, x, accArgs, accT, x.symbol)
+ }
+
+ val (prefix, call, arguments, typeArguments, symbol) = receiverArgumentsAndSymbol(tree)
+ val hasConformingTargs = (typeArguments zip methTparams).forall{x => x._1.tpe <:< x._2.tpe}
+ val recv = noTailTransform(prefix)
+
+ val targs = typeArguments.map(noTailTransform)
+ val argumentss = arguments.map(noTailTransforms)
+
+ val recvWiden = recv.tpe.widenDealias
+
+ val receiverIsSame = enclosingClass.typeRef.widenDealias =:= recvWiden
+ // NOTE(review): `method.name eq sym` compares a Name with a Symbol by
+ // reference, which can never be true; was `method.name eq sym.name`
+ // intended? Confirm before relying on the super-call diagnostic below.
+ val receiverIsSuper = (method.name eq sym) && enclosingClass.typeRef.widen <:< recvWiden
+ val receiverIsThis = recv.tpe =:= thisType || recv.tpe.widen =:= thisType
+
+ val isRecursiveCall = (method eq sym)
+
+ // Keeps the call as an ordinary (non-jump) application.
+ def continue = {
+ val method = noTailTransform(call)
+ val methodWithTargs = if (targs.nonEmpty) TypeApply(method, targs) else method
+ if (methodWithTargs.tpe.widen.isParameterless) methodWithTargs
+ else argumentss.foldLeft(methodWithTargs) {
+ // case (method, args) => Apply(method, args) // Dotty deviation no auto-detupling yet. Interesting that one can do it in Scala2!
+ (method, args) => Apply(method, args)
+ }
+ }
+ def fail(reason: String) = {
+ if (isMandatory || required) c.error(s"Cannot rewrite recursive call: $reason", tree.pos)
+ else c.debuglog("Cannot rewrite recursive call at: " + tree.pos + " because: " + reason)
+ continue
+ }
+
+ // Replaces the call with an application of `label`, passing the receiver
+ // and class type args explicitly when the method abstracts over its class.
+ def rewriteTailCall(recv: Tree): Tree = {
+ c.debuglog("Rewriting tail recursive call: " + tree.pos)
+ rewrote = true
+ val receiver = noTailTransform(recv)
+
+ val callTargs: List[tpd.Tree] =
+ if (abstractOverClass) {
+ val classTypeArgs = recv.tpe.baseTypeWithArgs(enclosingClass).argInfos
+ targs ::: classTypeArgs.map(x => ref(x.typeSymbol))
+ } else targs
+
+ val method = if (callTargs.nonEmpty) TypeApply(Ident(label.termRef), callTargs) else Ident(label.termRef)
+ val thisPassed =
+ if (this.method.owner.isClass)
+ method.appliedTo(receiver.ensureConforms(method.tpe.widen.firstParamTypes.head))
+ else method
+
+ val res =
+ if (thisPassed.tpe.widen.isParameterless) thisPassed
+ else argumentss.foldLeft(thisPassed) {
+ (met, ar) => Apply(met, ar) // Dotty deviation no auto-detupling yet.
+ }
+ res
+ }
+
+ if (isRecursiveCall) {
+ if (ctx.tailPos) {
+ if (!hasConformingTargs) fail("it changes type arguments on a polymorphic recursive call")
+ else if (recv eq EmptyTree) rewriteTailCall(This(enclosingClass.asClass))
+ else if (receiverIsSame || receiverIsThis) rewriteTailCall(recv)
+ else fail("it changes type of 'this' on a polymorphic recursive call")
+ }
+ else fail(defaultReason)
+ } else {
+ if (receiverIsSuper) fail("it contains a recursive call targeting a supertype")
+ else continue
+ }
+ }
+
+ def rewriteTry(tree: Try): Try = {
+ if (tree.finalizer eq EmptyTree) {
+ // SI-1672 Catches are in tail position when there is no finalizer
+ tpd.cpy.Try(tree)(
+ noTailTransform(tree.expr),
+ transformSub(tree.cases),
+ EmptyTree
+ )
+ }
+ else {
+ // With a finalizer nothing inside the try is in tail position.
+ tpd.cpy.Try(tree)(
+ noTailTransform(tree.expr),
+ noTailTransforms(tree.cases),
+ noTailTransform(tree.finalizer)
+ )
+ }
+ }
+
+ val res: Tree = tree match {
+
+ case Ident(qual) =>
+ val sym = tree.symbol
+ if (sym == method && ctx.tailPos) rewriteApply(tree, sym)
+ else tree
+
+ case tree: Select =>
+ val sym = tree.symbol
+ if (sym == method && ctx.tailPos) rewriteApply(tree, sym)
+ else tpd.cpy.Select(tree)(noTailTransform(tree.qualifier), tree.name)
+
+ case Apply(fun, args) =>
+ val meth = fun.symbol
+ // Short-circuiting && and || keep their right operand in tail position.
+ if (meth == defn.Boolean_|| || meth == defn.Boolean_&&)
+ tpd.cpy.Apply(tree)(fun, transform(args))
+ else
+ rewriteApply(tree, meth)
+
+ case tree@Block(stats, expr) =>
+ tpd.cpy.Block(tree)(
+ noTailTransforms(stats),
+ transform(expr)
+ )
+ case tree @ Typed(t: Apply, tpt) if tpt.tpe.hasAnnotation(defn.TailrecAnnot) =>
+ tpd.Typed(rewriteApply(t, t.fun.symbol, required = true), tpt)
+ case tree@If(cond, thenp, elsep) =>
+ tpd.cpy.If(tree)(
+ noTailTransform(cond),
+ transform(thenp),
+ transform(elsep)
+ )
+
+ case tree@CaseDef(_, _, body) =>
+ cpy.CaseDef(tree)(body = transform(body))
+
+ case tree@Match(selector, cases) =>
+ tpd.cpy.Match(tree)(
+ noTailTransform(selector),
+ transformSub(cases)
+ )
+
+ case tree: Try =>
+ rewriteTry(tree)
+
+ case Alternative(_) | Bind(_, _) =>
+ assert(false, "We should never have gotten inside a pattern")
+ tree
+
+ case t @ DefDef(_, _, _, _, _) =>
+ t // todo: could improve to handle DefDef's with a label flag calls to which are in tail position
+
+ case ValDef(_, _, _) | EmptyTree | Super(_, _) | This(_) |
+ Literal(_) | TypeTree() | TypeDef(_, _) =>
+ tree
+
+ case Return(expr, from) =>
+ tpd.cpy.Return(tree)(noTailTransform(expr), from)
+
+ case _ =>
+ super.transform(tree)
+ }
+
+ res
+ }
+ }
+
+ /** If references to original `target` from fully parameterized method `derived` should be
+ * rewired to some fully parameterized method, that method symbol,
+ * otherwise NoSymbol.
+ * TailRec never rewires such references, so this always answers NoSymbol.
+ */
+ override protected def rewiredTarget(target: Symbol, derived: Symbol)(implicit ctx: Context): Symbol = NoSymbol
+}
+
+object TailRec {
+
+ /** Records whether the tree currently being visited is in tail position. */
+ final class TailContext(val tailPos: Boolean) extends AnyVal
+
+ final val noTailContext: TailContext = new TailContext(false)
+ final val yesTailContext: TailContext = new TailContext(true)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
new file mode 100644
index 000000000..4a09d2fef
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -0,0 +1,452 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.Names.Name
+import core.DenotTransformers._
+import core.Denotations._
+import core.SymDenotations._
+import core.Contexts._
+import core.Symbols._
+import core.Types._
+import core.Flags._
+import core.Constants._
+import core.StdNames._
+import core.Decorators._
+import core.TypeErasure.isErasedType
+import core.Phases.Phase
+import core.Mode
+import typer._
+import typer.ErrorReporting._
+import reporting.ThrowingReporter
+import ast.Trees._
+import ast.{tpd, untpd}
+import util.SourcePosition
+import collection.mutable
+import ProtoTypes._
+import config.Printers
+import java.lang.AssertionError
+
+import dotty.tools.dotc.core.Names
+
+import scala.util.control.NonFatal
+
+/** Run by -Ycheck option after a given phase, this class retypes all syntax trees
+ * and verifies that the type of each tree node so obtained conforms to the type found in the tree node.
+ * It also performs the following checks:
+ *
+ * - The owner of each definition is the same as the owner of the current typing context.
+ * - Ident nodes do not refer to a denotation that would need a select to be accessible
+ * (see tpd.needsSelect).
+ * - After typer, identifiers and select nodes refer to terms only (all types should be
+ * represented as TypeTrees then).
+ */
+class TreeChecker extends Phase with SymTransformer {
+ import ast.tpd._
+
+
+ // Flat-name registries used to detect classes/module vals defined twice.
+ private val seenClasses = collection.mutable.HashMap[String, Symbol]()
+ private val seenModuleVals = collection.mutable.HashMap[String, Symbol]()
+
+ // Characters below are not legal in JVM binary class names.
+ def isValidJVMName(name: Name) =
+ !name.exists(c => c == '.' || c == ';' || c =='[' || c == '/')
+
+ // Method names additionally forbid '<' and '>' (except <init>/<clinit>, checked elsewhere).
+ def isValidJVMMethodName(name: Name) =
+ !name.exists(c => c == '.' || c == ';' || c =='[' || c == '/' || c == '<' || c == '>')
+
+ // Prints a colored error line but does not abort checking.
+ def printError(str: String)(implicit ctx: Context) = {
+ ctx.echo(Console.RED + "[error] " + Console.WHITE + str)
+ }
+
+ // Flags of symbols that legitimately have no superclass.
+ val NoSuperClass = Trait | Package
+
+ /** Registers `sym` under its full name in `registry`, reporting an error if a
+ * different symbol id already claimed that name (only once classes are
+ * flattened — presumably `flatClasses` comes from the current phase; confirm).
+ */
+ def testDuplicate(sym: Symbol, registry: mutable.Map[String, Symbol], typ: String)(implicit ctx: Context) = {
+ val name = sym.fullName.toString
+ if (this.flatClasses && registry.contains(name))
+ printError(s"$typ defined twice $sym ${sym.id} ${registry(name).id}")
+ registry(name) = sym
+ }
+
+ /** Asserts that a companion (linked class) existing at the previous phase
+ * still exists at the current one.
+ */
+ def checkCompanion(symd: SymDenotation)(implicit ctx: Context): Unit = {
+ val cur = symd.linkedClass
+ val prev = ctx.atPhase(ctx.phase.prev) { implicit ctx =>
+ symd.symbol.linkedClass
+ }
+
+ if (prev.exists)
+ assert(cur.exists, i"companion disappeared from $symd")
+ }
+
+ /** Per-symbol invariants: every concrete class has a superclass (or is one of
+ * the known exceptions), classes are not defined twice, and no method is
+ * both Deferred and Private. Returns the denotation unchanged.
+ */
+ def transformSym(symd: SymDenotation)(implicit ctx: Context): SymDenotation = {
+ val sym = symd.symbol
+
+ if (sym.isClass && !sym.isAbsent) {
+ val validSuperclass = sym.isPrimitiveValueClass || defn.syntheticCoreClasses.contains(sym) ||
+ (sym eq defn.ObjectClass) || (sym is NoSuperClass) || (sym.asClass.superClass.exists)
+ if (!validSuperclass)
+ printError(s"$sym has no superclass set")
+
+ testDuplicate(sym, seenClasses, "class")
+ }
+
+ if (sym.is(Method) && sym.is(Deferred) && sym.is(Private))
+ assert(false, s"$sym is both Deferred and Private")
+
+ checkCompanion(symd)
+
+ symd
+ }
+
+ // Name under which -Ycheck refers to this phase.
+ def phaseName: String = "Ycheck"
+
+ // Entry point: retype the current unit against all phases run so far.
+ def run(implicit ctx: Context): Unit = {
+ check(ctx.allPhases, ctx)
+ }
+
+ /** The prefix of `phases` (expanding squashed TreeTransformers into their
+ * mini-phases) that ran strictly before the current phase.
+ */
+ private def previousPhases(phases: List[Phase])(implicit ctx: Context): List[Phase] = phases match {
+ case (phase: TreeTransformer) :: phases1 =>
+ val subPhases = phase.miniPhases
+ val previousSubPhases = previousPhases(subPhases.toList)
+ // All mini-phases preceded us => keep scanning; otherwise stop inside this group.
+ if (previousSubPhases.length == subPhases.length) previousSubPhases ::: previousPhases(phases1)
+ else previousSubPhases
+ case phase :: phases1 if phase ne ctx.phase =>
+ phase :: previousPhases(phases1)
+ case _ =>
+ Nil
+ }
+
+ /** Retype the current compilation unit under a throwing reporter, verifying
+ * the invariants established by every phase that ran before the current one.
+ * Rethrows any (non-fatal) checking failure after printing context.
+ */
+ def check(phasesToRun: Seq[Phase], ctx: Context) = {
+ val prevPhase = ctx.phase.prev // can be a mini-phase
+ // Fixed misspelled local (was `squahsedPhase`).
+ val squashedPhase = ctx.squashed(prevPhase)
+ ctx.echo(s"checking ${ctx.compilationUnit} after phase ${squashedPhase}")
+
+ // Fresh context whose reporter throws on the first error.
+ val checkingCtx = ctx
+ .fresh
+ .setMode(Mode.ImplicitsEnabled)
+ .setReporter(new ThrowingReporter(ctx.reporter))
+
+ val checker = new Checker(previousPhases(phasesToRun.toList)(ctx))
+ try checker.typedExpr(ctx.compilationUnit.tpdTree)(checkingCtx)
+ catch {
+ case NonFatal(ex) => //TODO CHECK. Check that we are bootstrapped
+ implicit val ctx: Context = checkingCtx
+ println(i"*** error while checking ${ctx.compilationUnit} after phase ${checkingCtx.phase.prev} ***")
+ throw ex
+ }
+ }
+
+ class Checker(phasesToCheck: Seq[Phase]) extends ReTyper {
+
+ // Symbols whose definition is currently in scope / ever seen, used to detect
+ // doubly-defined symbols across the AST.
+ val nowDefinedSyms = new mutable.HashSet[Symbol]
+ val everDefinedSyms = new mutable.HashMap[Symbol, Tree]
+
+ /** Runs `op` with `tree`'s symbol registered as defined, checking JVM name
+ * validity and duplicate definitions on the way in; non-DefTrees just run `op`.
+ */
+ def withDefinedSym[T](tree: untpd.Tree)(op: => T)(implicit ctx: Context): T = tree match {
+ case tree: DefTree =>
+ val sym = tree.symbol
+ assert(isValidJVMName(sym.name), s"${sym.fullName} name is invalid on jvm")
+ everDefinedSyms.get(sym) match {
+ case Some(t) =>
+ if (t ne tree)
+ ctx.warning(i"symbol ${sym.fullName} is defined at least twice in different parts of AST")
+ // should become an error
+ case None =>
+ everDefinedSyms(sym) = tree
+ }
+ assert(!nowDefinedSyms.contains(sym), i"doubly defined symbol: ${sym.fullName} in $tree")
+
+ if (ctx.settings.YcheckMods.value) {
+ tree match {
+ case t: MemberDef =>
+ if (t.name ne sym.name) ctx.warning(s"symbol ${sym.fullName} name doesn't correspond to AST: ${t}")
+ // todo: compare trees inside annotations
+ case _ =>
+ }
+ }
+
+ // Register only for the dynamic extent of `op`.
+ nowDefinedSyms += tree.symbol
+ //ctx.echo(i"defined: ${tree.symbol}")
+ val res = op
+ nowDefinedSyms -= tree.symbol
+ //ctx.echo(i"undefined: ${tree.symbol}")
+ res
+ case _ => op
+ }
+
+ // Nested variants: register every tree (resp. every parameter of every clause)
+ // around `op`, by folding withDefinedSym from the right.
+ def withDefinedSyms[T](trees: List[untpd.Tree])(op: => T)(implicit ctx: Context) =
+ trees.foldRightBN(op)(withDefinedSym(_)(_))
+
+ def withDefinedSymss[T](vparamss: List[List[untpd.ValDef]])(op: => T)(implicit ctx: Context): T =
+ vparamss.foldRightBN(op)(withDefinedSyms(_)(_))
+
+ // A term-owned symbol must be inside the dynamic scope of its definition.
+ def assertDefined(tree: untpd.Tree)(implicit ctx: Context) =
+ if (tree.symbol.maybeOwner.isTerm)
+ assert(nowDefinedSyms contains tree.symbol, i"undefined symbol ${tree.symbol}")
+
+ /** assert Java classes are not used as objects (only Idents are checked;
+ * other trees pass through unexamined) */
+ def assertIdentNotJavaClass(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case _ : untpd.Ident =>
+ assert(!tree.symbol.is(JavaModule), "Java class can't be used as value: " + tree)
+ case _ =>
+ }
+
+ /** check Java classes are not used as objects: applies assertIdentNotJavaClass
+ * to every value position of one tree node (non-recursively — recursion
+ * happens via `typed` visiting each node). The commented-out cases document
+ * node kinds deliberately left unchecked.
+ */
+ def checkIdentNotJavaClass(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ // case tree: untpd.Ident =>
+ // case tree: untpd.Select =>
+ // case tree: untpd.Bind =>
+ case vd : ValDef =>
+ assertIdentNotJavaClass(vd.forceIfLazy)
+ case dd : DefDef =>
+ assertIdentNotJavaClass(dd.forceIfLazy)
+ // case tree: untpd.TypeDef =>
+ case Apply(fun, args) =>
+ assertIdentNotJavaClass(fun)
+ args.foreach(assertIdentNotJavaClass _)
+ // case tree: untpd.This =>
+ // case tree: untpd.Literal =>
+ // case tree: untpd.New =>
+ case Typed(expr, _) =>
+ assertIdentNotJavaClass(expr)
+ case NamedArg(_, arg) =>
+ assertIdentNotJavaClass(arg)
+ case Assign(_, rhs) =>
+ assertIdentNotJavaClass(rhs)
+ case Block(stats, expr) =>
+ stats.foreach(assertIdentNotJavaClass _)
+ assertIdentNotJavaClass(expr)
+ case If(_, thenp, elsep) =>
+ assertIdentNotJavaClass(thenp)
+ assertIdentNotJavaClass(elsep)
+ // case tree: untpd.Closure =>
+ case Match(selector, cases) =>
+ assertIdentNotJavaClass(selector)
+ cases.foreach(caseDef => assertIdentNotJavaClass(caseDef.body))
+ case Return(expr, _) =>
+ assertIdentNotJavaClass(expr)
+ case Try(expr, cases, finalizer) =>
+ assertIdentNotJavaClass(expr)
+ cases.foreach(caseDef => assertIdentNotJavaClass(caseDef.body))
+ assertIdentNotJavaClass(finalizer)
+ // case tree: TypeApply =>
+ // case tree: Super =>
+ case SeqLiteral(elems, _) =>
+ elems.foreach(assertIdentNotJavaClass)
+ // case tree: TypeTree =>
+ // case tree: SingletonTypeTree =>
+ // case tree: AndTypeTree =>
+ // case tree: OrTypeTree =>
+ // case tree: RefinedTypeTree =>
+ // case tree: AppliedTypeTree =>
+ // case tree: ByNameTypeTree =>
+ // case tree: TypeBoundsTree =>
+ // case tree: Alternative =>
+ // case tree: PackageDef =>
+ case Annotated(arg, _) =>
+ assertIdentNotJavaClass(arg)
+ case _ =>
+ }
+
+ // Hook every retyped node through the Java-class-as-value check.
+ override def typed(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree = {
+ val tpdTree = super.typed(tree, pt)
+ checkIdentNotJavaClass(tpdTree)
+ tpdTree
+ }
+
+ /** Retypes `tree`, then asserts the recomputed type conforms to the recorded
+ * one. Patterns are not recheckable; types are promoted as-is. Finally runs
+ * orphan-binder and per-phase postcondition checks on the result.
+ */
+ override def typedUnadapted(tree: untpd.Tree, pt: Type)(implicit ctx: Context): tpd.Tree = {
+ val res = tree match {
+ case _: untpd.UnApply =>
+ // can't recheck patterns
+ tree.asInstanceOf[tpd.Tree]
+ case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[_] =>
+ super.typedUnadapted(tree)
+ case _ if tree.isType =>
+ promote(tree)
+ case _ =>
+ val tree1 = super.typedUnadapted(tree, pt)
+ def isSubType(tp1: Type, tp2: Type) =
+ (tp1 eq tp2) || // accept NoType / NoType
+ (tp1 <:< tp2)
+ def divergenceMsg(tp1: Type, tp2: Type) =
+ s"""Types differ
+ |Original type : ${tree.typeOpt.show}
+ |After checking: ${tree1.tpe.show}
+ |Original tree : ${tree.show}
+ |After checking: ${tree1.show}
+ |Why different :
+ """.stripMargin + core.TypeComparer.explained((tp1 <:< tp2)(_))
+ if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted
+ assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt))
+ tree1
+ }
+ checkNoOrphans(res.tpe)
+ phasesToCheck.foreach(_.checkPostCondition(res))
+ res
+ }
+
+ /** Check that PolyParams and MethodParams refer to an enclosing type:
+ * walks `tp` tracking the binders in scope and asserts every ParamType's
+ * binder was seen on the way down.
+ */
+ def checkNoOrphans(tp: Type)(implicit ctx: Context) = new TypeMap() {
+ val definedBinders = mutable.Set[Type]()
+ def apply(tp: Type): Type = {
+ tp match {
+ case tp: BindingType =>
+ // Binder is in scope only while mapping over its own body.
+ definedBinders += tp
+ mapOver(tp)
+ definedBinders -= tp
+ case tp: ParamType =>
+ assert(definedBinders.contains(tp.binder), s"orphan param: $tp")
+ case tp: TypeVar =>
+ apply(tp.underlying)
+ case _ =>
+ mapOver(tp)
+ }
+ tp
+ }
+ }.apply(tp)
+
+ // Repeated parameter types may only appear on Case-flagged symbols here;
+ // anywhere else they should have been eliminated by earlier phases.
+ def checkNotRepeated(tree: Tree)(implicit ctx: Context): tree.type = {
+ def allowedRepeated = (tree.symbol.flags is Case) && tree.tpe.widen.isRepeatedParam
+
+ assert(!tree.tpe.widen.isRepeatedParam || allowedRepeated, i"repeated parameter type not allowed here: $tree")
+ tree
+ }
+
+ /** Check that all methods have MethodicType: true for MethodType, ExprType,
+ * PolyType, looking through annotations. */
+ def isMethodType(pt: Type)(implicit ctx: Context): Boolean = pt match {
+ case at: AnnotatedType => isMethodType(at.tpe)
+ case _: MethodicType => true // MethodType, ExprType, PolyType
+ case _ => false
+ }
+
+ // After typer, identifiers must be terms, must not need a select to be
+ // accessible, and term-owned ones must be inside their definition's scope.
+ override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree = {
+ assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase)
+ assert(tree.isType || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}")
+ assertDefined(tree)
+
+ checkNotRepeated(super.typedIdent(tree, pt))
+ }
+
+ /** Makes sure the symbol in the tree can be approximately reconstructed by
+ * calling `member` on the qualifier type.
+ * Approximately means: The two symbols might be different but one still overrides the other.
+ */
+ override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
+ assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase)
+ val tpe = tree.typeOpt
+ val sym = tree.symbol
+ if (!tpe.isInstanceOf[WithFixedSym] && sym.exists && !sym.is(Private)) {
+ val qualTpe = tree.qualifier.typeOpt
+ // NOTE(review): the outer guard already excludes Private symbols, so the
+ // `sym.is(Private)` branch below can never be taken — confirm intent.
+ val member =
+ if (sym.is(Private)) qualTpe.member(tree.name)
+ else qualTpe.nonPrivateMember(tree.name)
+ val memberSyms = member.alternatives.map(_.symbol)
+ assert(memberSyms.exists(mbr =>
+ sym == mbr ||
+ sym.overriddenSymbol(mbr.owner.asClass) == mbr ||
+ mbr.overriddenSymbol(sym.owner.asClass) == sym),
+ ex"""symbols differ for $tree
+ |was : $sym
+ |alternatives by type: $memberSyms%, % of types ${memberSyms.map(_.info)}%, %
+ |qualifier type : ${tree.qualifier.typeOpt}
+ |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""")
+ }
+ checkNotRepeated(super.typedSelect(tree, pt))
+ }
+
+ // A This node must refer to a static owner or to a class enclosing the
+ // current context owner.
+ override def typedThis(tree: untpd.This)(implicit ctx: Context) = {
+ val res = super.typedThis(tree)
+ val cls = res.symbol
+ assert(cls.isStaticOwner || ctx.owner.isContainedIn(cls), i"error while typing $tree, ${ctx.owner} is not contained in $cls")
+ res
+ }
+
+ /** Asserts `tree`'s symbol is owned by the context owner, modulo weak owners
+ * and (when the phase reorders labels) weak symbol owners.
+ */
+ private def checkOwner(tree: untpd.Tree)(implicit ctx: Context): Unit = {
+ def ownerMatches(symOwner: Symbol, ctxOwner: Symbol): Boolean =
+ symOwner == ctxOwner ||
+ ctxOwner.isWeakOwner && ownerMatches(symOwner, ctxOwner.owner) ||
+ ctx.phase.labelsReordered && symOwner.isWeakOwner && ownerMatches(symOwner.owner, ctxOwner)
+ assert(ownerMatches(tree.symbol.owner, ctx.owner),
+ i"bad owner; ${tree.symbol} has owner ${tree.symbol.owner}, expected was ${ctx.owner}\n" +
+ i"owner chain = ${tree.symbol.ownersIterator.toList}%, %, ctxOwners = ${ctx.outersIterator.map(_.owner).toList}%, %")
+ }
+
+ /** Class-level invariants: template/constructor symbols belong to the class,
+ * the primary constructor matches the tree, and every non-magical method
+ * declared in the class info has a corresponding definition in the body.
+ */
+ override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(implicit ctx: Context) = {
+ val TypeDef(_, impl @ Template(constr, _, _, _)) = cdef
+ assert(cdef.symbol == cls)
+ assert(impl.symbol.owner == cls)
+ assert(constr.symbol.owner == cls)
+ assert(cls.primaryConstructor == constr.symbol, i"mismatch, primary constructor ${cls.primaryConstructor}, in tree = ${constr.symbol}")
+ checkOwner(impl)
+ checkOwner(impl.constr)
+
+ // Excludes synthetic companion/value-class conversion methods, which have
+ // declarations but no tree in the template body.
+ def isNonMagicalMethod(x: Symbol) =
+ x.is(Method) &&
+ !x.isCompanionMethod &&
+ !x.isValueClassConvertMethod
+
+ val symbolsNotDefined = cls.classInfo.decls.toSet.filter(isNonMagicalMethod) -- impl.body.map(_.symbol) - constr.symbol
+
+ assert(symbolsNotDefined.isEmpty,
+ i" $cls tree does not define methods: ${symbolsNotDefined.toList}%, %\n" +
+ i"expected: ${cls.classInfo.decls.toSet.filter(isNonMagicalMethod).toList}%, %\n" +
+ i"defined: ${impl.body.map(_.symbol)}%, %")
+
+ super.typedClassDef(cdef, cls)
+ }
+
+ // Retypes a DefDef with its type and value parameters in scope, checking the
+ // method name is JVM-legal (static initializers excepted) and that the
+ // symbol's info is a methodic type.
+ override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) =
+ withDefinedSyms(ddef.tparams) {
+ withDefinedSymss(ddef.vparamss) {
+ if (!sym.isClassConstructor && !(sym.name eq Names.STATIC_CONSTRUCTOR)) assert(isValidJVMMethodName(sym.name), s"${sym.fullName} name is invalid on jvm")
+ val tpdTree = super.typedDefDef(ddef, sym)
+ assert(isMethodType(sym.info), i"wrong type, expect a method type for ${sym.fullName}, but found: ${sym.info}")
+ tpdTree
+ }
+ }
+
+ // Pattern-bound symbols (Bind nodes) are in scope for the duration of the case.
+ override def typedCase(tree: untpd.CaseDef, pt: Type, selType: Type, gadtSyms: Set[Symbol])(implicit ctx: Context): CaseDef = {
+ withDefinedSyms(tree.pat.asInstanceOf[tpd.Tree].filterSubTrees(_.isInstanceOf[ast.Trees.Bind[_]])) {
+ super.typedCase(tree, pt, selType, gadtSyms)
+ }
+ }
+
+ // Block statements and Inlined bindings define symbols scoped to the node.
+ override def typedBlock(tree: untpd.Block, pt: Type)(implicit ctx: Context) =
+ withDefinedSyms(tree.stats) { super.typedBlock(tree, pt) }
+
+ override def typedInlined(tree: untpd.Inlined, pt: Type)(implicit ctx: Context) =
+ withDefinedSyms(tree.bindings) { super.typedInlined(tree, pt) }
+
+ /** Check that all defined symbols have legal owners.
+ * An owner is legal if it is either the same as the context's owner
+ * or there's an owner chain of valdefs starting at the context's owner and
+ * reaching up to the symbol's owner. The reason for this relaxed matching
+ * is that we should be able to pull out an expression as an initializer
+ * of a helper value without having to do a change owner traversal of the expression.
+ */
+ override def typedStats(trees: List[untpd.Tree], exprOwner: Symbol)(implicit ctx: Context): List[Tree] = {
+ for (tree <- trees) tree match {
+ case tree: untpd.DefTree => checkOwner(tree)
+ // Thickets must have been expanded before reaching a statement sequence.
+ case _: untpd.Thicket => assert(false, i"unexpanded thicket $tree in statement sequence $trees%\n%")
+ case _ =>
+ }
+ super.typedStats(trees, exprOwner)
+ }
+
+ // Disabled during checking: leaking local references is not an error here.
+ override def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol], forcedDefined: Boolean = false)(implicit ctx: Context): Tree =
+ tree
+
+ /** Instead of adapting, asserts the recomputed type conforms to the expected
+ * one (skipping empty trees, primary-constructor Unit returns, and function
+ * prototypes) and returns the tree unchanged.
+ */
+ override def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = {
+ def isPrimaryConstructorReturn =
+ ctx.owner.isPrimaryConstructor && pt.isRef(ctx.owner.owner) && tree.tpe.isRef(defn.UnitClass)
+ if (ctx.mode.isExpr &&
+ !tree.isEmpty &&
+ !isPrimaryConstructorReturn &&
+ !pt.isInstanceOf[FunProto])
+ assert(tree.tpe <:< pt, {
+ val mismatch = err.typeMismatchMsg(tree.tpe, pt)
+ i"""|${mismatch.msg}
+ |tree = $tree""".stripMargin
+ })
+ tree
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala
new file mode 100644
index 000000000..7a5c5df9d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala
@@ -0,0 +1,48 @@
+package dotty.tools.dotc
+package transform
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Flags._, Trees._, Types._, StdNames._, Symbols._
+import ValueClasses._
+
+object TreeExtractors {
+ import tpd._
+
+ /** Match arg1.op(arg2) and extract (arg1, op.symbol, arg2) */
+ object BinaryOp {
+ def unapply(t: Tree)(implicit ctx: Context): Option[(Tree, Symbol, Tree)] = t match {
+ case Apply(sel @ Select(lhs, _), rhs :: Nil) => Some((lhs, sel.symbol, rhs))
+ case _ => None
+ }
+ }
+
+ /** Match new C(args) and extract (C, args) */
+ object NewWithArgs {
+ def unapply(t: Tree)(implicit ctx: Context): Option[(Type, List[Tree])] = t match {
+ case Apply(Select(New(_), nme.CONSTRUCTOR), args) => Some((t.tpe, args))
+ case _ => None
+ }
+ }
+
+ /** For an instance v of a value class like:
+ * class V(val underlying: X) extends AnyVal
+ * Match v.underlying() and extract v
+ */
+ object ValueClassUnbox {
+ def unapply(t: Tree)(implicit ctx: Context): Option[Tree] = t match {
+ case Apply(sel @ Select(receiver, _), Nil) =>
+ val denot = receiver.tpe.widenDealias.typeSymbol.denot
+ val isUnboxCall = isDerivedValueClass(denot) && (sel.symbol eq valueClassUnbox(denot.asClass))
+ if (isUnboxCall) Some(receiver) else None
+ case _ =>
+ None
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeGen.scala b/compiler/src/dotty/tools/dotc/transform/TreeGen.scala
new file mode 100644
index 000000000..7e507d905
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TreeGen.scala
@@ -0,0 +1,26 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Symbols._, Contexts._, Types._, Names._, StdNames._
+import ast._
+import Trees._
+import TypeUtils._
+
+/** Helpers for synthesizing the `Predef` wrapper calls that convert arrays
+ *  into sequences.
+ */
+object TreeGen {
+
+  import tpd._
+
+  /** The name of the `Predef` method that wraps an array with element type
+   *  `elemtp`: a specialized `wrapXArray` variant when the element class is a
+   *  primitive value class, `wrapRefArray` when it derives from `Object` (and
+   *  is not a phantom class), and `genericWrapArray` otherwise.
+   */
+  def wrapArrayMethodName(elemtp: Type)(implicit ctx: Context): TermName = {
+    val elemCls = elemtp.classSymbol
+    if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name)
+    else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isPhantomClass) nme.wrapRefArray
+    else nme.genericWrapArray
+  }
+
+  /** Build the tree `Predef.wrap...Array(tree)` selecting the method via
+   *  `wrapArrayMethodName`. No type argument is passed when `elemtp` is a
+   *  primitive value type; otherwise `elemtp` is passed as the single type
+   *  argument.
+   */
+  def wrapArray(tree: Tree, elemtp: Type)(implicit ctx: Context): Tree =
+    ref(defn.ScalaPredefModule)
+      .select(wrapArrayMethodName(elemtp))
+      .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil)
+      .appliedTo(tree)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala b/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala
new file mode 100644
index 000000000..5385ca720
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala
@@ -0,0 +1,1221 @@
+package dotty.tools
+package dotc
+package transform
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.DenotTransformers.{InfoTransformer, DenotTransformer}
+import dotty.tools.dotc.core.Denotations.SingleDenotation
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.core.SymDenotations.SymDenotation
+import dotty.tools.dotc.core.Symbols.Symbol
+import dotty.tools.dotc.core.Flags.PackageVal
+import dotty.tools.dotc.core.Mode
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.util.DotClass
+import scala.annotation.tailrec
+import config.Printers.transforms
+import scala.util.control.NonFatal
+
+object TreeTransforms {
+ import tpd._
+
+  /** The base class of tree transforms. For each kind of tree K, there are
+   *  two methods which can be overridden:
+   *
+   *  prepareForK  // return a new TreeTransform which gets applied to the K
+   *               // node and its children
+   *  transformK   // transform node of type K
+   *
+   *  If a transform does not need to visit a node or any of its children, it
+   *  signals this fact by returning a NoTransform from a prepare method.
+   *
+   *  If all transforms in a group are NoTransforms, the tree is no longer traversed.
+   *
+   *
+   *  Performance analysis: Taking the dotty compiler frontend as a use case, we are aiming for a warm performance of
+   *  about 4000 lines / sec. This means 6 seconds for a codebase of 24'000 lines. Of these the frontend consumes
+   *  over 2.5 seconds, erasure and code generation will most likely consume over 1 second each. So we would have
+   *  about 1 sec for all other transformations in our budget. Of this second, let's assume a maximum of 20% for
+   *  the general dispatch overhead as opposed to the concrete work done in transformations. So that leaves us with
+   *  0.2sec, or roughly 600M processor cycles.
+   *
+   *  Now, to the amount of work that needs to be done. The codebase produces an average of about 250'000 trees after typechecking.
+   *  Transformations are likely to make this bigger so let's assume 300K trees on average. We estimate to have about 100
+   *  micro-transformations. Let's say 5 transformation groups of 20 micro-transformations each. (by comparison,
+   *  scalac has in excess of 20 phases, and most phases do multiple transformations). There are then 30M visits
+   *  of a node by a transformation. Each visit has a budget of 20 processor cycles.
+   *
+   *  A more detailed breakdown: I assume that about one third of all transformations have real work to do for each node.
+   *  This might look high, but keep in mind that the most common nodes are Idents and Selects, and most transformations
+   *  touch these. By contrast the amount of work for generating new transformations should be negligible.
+   *
+   *  So, in 400 clock cycles we need to (1) perform a pattern match according to the type of node, (2) generate new
+   *  transformations if applicable, (3) reconstitute the tree node from the result of transforming the children, and
+   *  (4) chain 7 out of 20 transformations over the resulting tree node. I believe the current algorithm is suitable
+   *  for achieving this goal, but there can be no wasted cycles anywhere.
+   */
+  abstract class TreeTransform extends DotClass {
+
+    /** The mini phase this transform belongs to */
+    def phase: MiniPhase
+
+    /** The phase in whose context the transform hooks run; defaults to the
+     *  phase following this transform's own mini phase.
+     */
+    def treeTransformPhase: Phase = phase.next
+
+    /* prepareForX hooks, called before an X node's children are transformed.
+     * Returning `this` (the default) keeps this transform active for the
+     * subtree; returning NoTransform signals that neither the node nor its
+     * children need to be visited by this transform.
+     */
+    def prepareForIdent(tree: Ident)(implicit ctx: Context) = this
+    def prepareForSelect(tree: Select)(implicit ctx: Context) = this
+    def prepareForThis(tree: This)(implicit ctx: Context) = this
+    def prepareForSuper(tree: Super)(implicit ctx: Context) = this
+    def prepareForApply(tree: Apply)(implicit ctx: Context) = this
+    def prepareForTypeApply(tree: TypeApply)(implicit ctx: Context) = this
+    def prepareForLiteral(tree: Literal)(implicit ctx: Context) = this
+    def prepareForNew(tree: New)(implicit ctx: Context) = this
+    def prepareForTyped(tree: Typed)(implicit ctx: Context) = this
+    def prepareForAssign(tree: Assign)(implicit ctx: Context) = this
+    def prepareForBlock(tree: Block)(implicit ctx: Context) = this
+    def prepareForIf(tree: If)(implicit ctx: Context) = this
+    def prepareForClosure(tree: Closure)(implicit ctx: Context) = this
+    def prepareForMatch(tree: Match)(implicit ctx: Context) = this
+    def prepareForCaseDef(tree: CaseDef)(implicit ctx: Context) = this
+    def prepareForReturn(tree: Return)(implicit ctx: Context) = this
+    def prepareForTry(tree: Try)(implicit ctx: Context) = this
+    def prepareForSeqLiteral(tree: SeqLiteral)(implicit ctx: Context) = this
+    def prepareForInlined(tree: Inlined)(implicit ctx: Context) = this
+    def prepareForTypeTree(tree: TypeTree)(implicit ctx: Context) = this
+    def prepareForBind(tree: Bind)(implicit ctx: Context) = this
+    def prepareForAlternative(tree: Alternative)(implicit ctx: Context) = this
+    def prepareForTypeDef(tree: TypeDef)(implicit ctx: Context) = this
+    def prepareForUnApply(tree: UnApply)(implicit ctx: Context) = this
+    def prepareForValDef(tree: ValDef)(implicit ctx: Context) = this
+    def prepareForDefDef(tree: DefDef)(implicit ctx: Context) = this
+    def prepareForTemplate(tree: Template)(implicit ctx: Context) = this
+    def prepareForPackageDef(tree: PackageDef)(implicit ctx: Context) = this
+    def prepareForStats(trees: List[Tree])(implicit ctx: Context) = this
+
+    def prepareForUnit(tree: Tree)(implicit ctx: Context) = this
+
+    /* transformX hooks: transform a node of type X. The default is the
+     * identity (the node is returned unchanged).
+     */
+    def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformThis(tree: This)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformSuper(tree: Super)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformLiteral(tree: Literal)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformNew(tree: New)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformTyped(tree: Typed)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformAssign(tree: Assign)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformBlock(tree: Block)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformIf(tree: If)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformClosure(tree: Closure)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformMatch(tree: Match)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformCaseDef(tree: CaseDef)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformReturn(tree: Return)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformTry(tree: Try)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformSeqLiteral(tree: SeqLiteral)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformInlined(tree: Inlined)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformTypeTree(tree: TypeTree)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformBind(tree: Bind)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformAlternative(tree: Alternative)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformUnApply(tree: UnApply)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformValDef(tree: ValDef)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformTypeDef(tree: TypeDef)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformPackageDef(tree: PackageDef)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+    def transformStats(trees: List[Tree])(implicit ctx: Context, info: TransformerInfo): List[Tree] = trees
+    def transformOther(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+
+    def transformUnit(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+
+    /** Transform tree using all transforms of current group (including this one) */
+    def transform(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = info.group.transform(tree, info, 0)
+
+    /** Transform subtree using all transforms following the current one in this group */
+    def transformFollowingDeep(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = info.group.transform(tree, info, phase.idx + 1)
+
+    /** Transform single node using all transforms following the current one in this group */
+    def transformFollowing(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = info.group.transformSingle(tree, phase.idx + 1)
+
+    /** Perform `action` in the context of the phase that follows the last
+     *  transform of the current group.
+     */
+    def atGroupEnd[T](action : Context => T)(implicit ctx: Context, info: TransformerInfo) = {
+      val last = info.transformers(info.transformers.length - 1)
+      action(ctx.withPhase(last.phase.next))
+    }
+  }
+
+  /** A phase that defines a TreeTransform to be used in a group */
+  trait MiniPhase extends Phase { thisPhase =>
+    /** The tree transform this phase contributes to its group */
+    def treeTransform: TreeTransform
+
+    /** id of this mini phase in group; assigned by the enclosing
+     *  TreeTransformer when it builds its transformation cache.
+     */
+    var idx: Int = _
+
+    /** List of names of phases that should have finished their processing of all compilation units
+     *  before this phase starts
+     */
+    def runsAfterGroupsOf: Set[Class[_ <: Phase]] = Set.empty
+
+    /** Wrap this mini phase in a transformer group containing only itself */
+    protected def mkTreeTransformer = new TreeTransformer {
+      override def phaseName: String = thisPhase.phaseName
+      override def miniPhases = Array(thisPhase)
+    }
+
+    /** Run this mini phase standalone, as a singleton group */
+    override def run(implicit ctx: Context): Unit = {
+      mkTreeTransformer.run
+    }
+  }
+
+  /** A mini phase that is its own tree transform */
+  abstract class MiniPhaseTransform extends TreeTransform with MiniPhase {
+    def treeTransform = this  // the phase and its transform coincide
+    def phase = this
+  }
+
+  /** A helper trait to transform annotations on MemberDefs: when a symbol
+   *  denotation is transformed, the trees of its annotations are run through
+   *  this phase's tree transform as well.
+   */
+  trait AnnotationTransformer extends MiniPhaseTransform with DenotTransformer {
+
+    /** Singleton transformer group used to rewrite annotation trees */
+    val annotationTransformer = mkTreeTransformer
+    override final def treeTransformPhase = this
+    // need to run at own phase because otherwise we get ahead of ourselves in transforming denotations
+
+    abstract override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation =
+      super.transform(ref) match {
+        case ref1: SymDenotation if ref1.symbol.isDefinedInCurrentRun =>
+          // Transform the annotation trees too; only copy the denotation if
+          // at least one annotation tree actually changed (mapConserve keeps
+          // reference identity when nothing changes).
+          val annotTrees = ref1.annotations.map(_.tree)
+          val annotTrees1 = annotTrees.mapConserve(annotationTransformer.macroTransform)
+          if (annotTrees eq annotTrees1) ref1
+          else ref1.copySymDenotation(annotations = annotTrees1.map(new ConcreteAnnotation(_)))
+        case ref1 =>
+          ref1
+      }
+  }
+
+  /** The sentinel transform returned from a prepare method to signal that a
+   *  transform has no work to do on a node or any of its children.
+   */
+  @sharable val NoTransform = new TreeTransform {
+    def phase = unsupported("phase")
+  }
+
+  /** A function that applies one prepare hook of a transform to a tree,
+   *  yielding the transform to use for the subtree (possibly NoTransform).
+   */
+  type Mutator[T] = (TreeTransform, T, Context) => TreeTransform
+
+  /** State threaded through a traversal: the currently active transforms,
+   *  their execution plans (`nx`), and the owning transformer group.
+   */
+  class TransformerInfo(val transformers: Array[TreeTransform], val nx: NXTransformations, val group: TreeTransformer)
+
+  /** This class maintains track of which methods are redefined in MiniPhases and creates execution plans for transformXXX and prepareXXX
+   *  Thanks to Martin for this idea
+   *  @see NXTransformations.index for format of plan
+   */
+  class NXTransformations {
+
+    /** Does `cls` redefine (i.e. declare below TreeTransform) a method named
+     *  `name`? Walks up the superclass chain to the lowest class declaring a
+     *  method with that name; a declaration in TreeTransform itself does not
+     *  count. The recursion terminates because TreeTransform declares every
+     *  hook method.
+     */
+    private def hasRedefinedMethod(cls: Class[_], name: String): Boolean =
+      if (cls.getDeclaredMethods.exists(_.getName == name)) cls != classOf[TreeTransform]
+      else hasRedefinedMethod(cls.getSuperclass, name)
+
+    /** Create an index array `next` of size one larger than the size of `transforms` such that
+     *  for each index i, `next(i)` is the smallest index j such that
+     *
+     *  i <= j
+     *  j == transforms.length || transform(j) defines a non-default method with given `name`
+     */
+    private def index(transformations: Array[Class[_]], name: String): Array[Int] = {
+      val len = transformations.length
+      val next = new Array[Int](len + 1)
+      var nextTransform: Int = len
+
+      /* loop invariant: nextTransform == the smallest j such that
+       * i < j and
+       * j == transforms.length || transform(j) defines a non-default method with given `name`
+       */
+      next(len) = len
+      var i = len - 1
+      while (i >= 0) {
+        // update nextTransform if this phase redefines the method
+        if (hasRedefinedMethod(transformations(i), name)) {
+          nextTransform = i
+        }
+        next(i) = nextTransform
+        i -= 1
+      }
+      next
+    }
+
+    /** Recompute one entry of a plan after the transform at `index` was
+     *  replaced by an instance of `changedTransformation`. If the "defines
+     *  the method" status did not change, `prev` is returned unchanged;
+     *  otherwise the affected prefix of the plan is rewritten (in a clone
+     *  unless `copy` is false, which allows reusing an already-cloned array).
+     */
+    private def indexUpdate(prev: Array[Int], changedTransformation: Class[_], index: Int, name: String, copy: Boolean = true) = {
+      val isDefinedNow = hasRedefinedMethod(changedTransformation, name)
+      val wasDefinedBefore = prev(index) == index
+      if (isDefinedNow == wasDefinedBefore) prev
+      else {
+        val result = if (copy) prev.clone() else prev
+        val oldValue = result(index)
+        val newValue =
+          if (wasDefinedBefore /* && !isDefinedNow */ ) prev(index + 1)
+          else index // isDefinedNow
+        var i = index
+        while (i >= 0 && result(i) == oldValue) {
+          result(i) = newValue
+          i -= 1
+        }
+        result
+      }
+    }
+
+    /** Build all prepare/transform execution plans for a fixed array of
+     *  transformation classes.
+     */
+    def this(transformations: Array[Class[_]]) = {
+      this()
+      nxPrepIdent = index(transformations, "prepareForIdent")
+      nxPrepSelect = index(transformations, "prepareForSelect")
+      nxPrepThis = index(transformations, "prepareForThis")
+      nxPrepSuper = index(transformations, "prepareForSuper")
+      nxPrepApply = index(transformations, "prepareForApply")
+      nxPrepTypeApply = index(transformations, "prepareForTypeApply")
+      nxPrepLiteral = index(transformations, "prepareForLiteral")
+      nxPrepNew = index(transformations, "prepareForNew")
+      nxPrepTyped = index(transformations, "prepareForTyped")
+      nxPrepAssign = index(transformations, "prepareForAssign")
+      nxPrepBlock = index(transformations, "prepareForBlock")
+      nxPrepIf = index(transformations, "prepareForIf")
+      nxPrepClosure = index(transformations, "prepareForClosure")
+      nxPrepCaseDef = index(transformations, "prepareForCaseDef")
+      nxPrepMatch = index(transformations, "prepareForMatch")
+      nxPrepReturn = index(transformations, "prepareForReturn")
+      nxPrepTry = index(transformations, "prepareForTry")
+      nxPrepSeqLiteral = index(transformations, "prepareForSeqLiteral")
+      nxPrepInlined = index(transformations, "prepareForInlined")
+      nxPrepTypeTree = index(transformations, "prepareForTypeTree")
+      nxPrepBind = index(transformations, "prepareForBind")
+      nxPrepAlternative = index(transformations, "prepareForAlternative")
+      nxPrepUnApply = index(transformations, "prepareForUnApply")
+      nxPrepValDef = index(transformations, "prepareForValDef")
+      nxPrepDefDef = index(transformations, "prepareForDefDef")
+      nxPrepTypeDef = index(transformations, "prepareForTypeDef")
+      nxPrepTemplate = index(transformations, "prepareForTemplate")
+      nxPrepPackageDef = index(transformations, "prepareForPackageDef")
+      nxPrepStats = index(transformations, "prepareForStats")
+      nxPrepUnit = index(transformations, "prepareForUnit")
+
+      nxTransIdent = index(transformations, "transformIdent")
+      nxTransSelect = index(transformations, "transformSelect")
+      nxTransThis = index(transformations, "transformThis")
+      nxTransSuper = index(transformations, "transformSuper")
+      nxTransApply = index(transformations, "transformApply")
+      nxTransTypeApply = index(transformations, "transformTypeApply")
+      nxTransLiteral = index(transformations, "transformLiteral")
+      nxTransNew = index(transformations, "transformNew")
+      nxTransTyped = index(transformations, "transformTyped")
+      nxTransAssign = index(transformations, "transformAssign")
+      nxTransBlock = index(transformations, "transformBlock")
+      nxTransIf = index(transformations, "transformIf")
+      nxTransClosure = index(transformations, "transformClosure")
+      nxTransMatch = index(transformations, "transformMatch")
+      nxTransCaseDef = index(transformations, "transformCaseDef")
+      nxTransReturn = index(transformations, "transformReturn")
+      nxTransTry = index(transformations, "transformTry")
+      nxTransSeqLiteral = index(transformations, "transformSeqLiteral")
+      nxTransInlined = index(transformations, "transformInlined")
+      nxTransTypeTree = index(transformations, "transformTypeTree")
+      nxTransBind = index(transformations, "transformBind")
+      nxTransAlternative = index(transformations, "transformAlternative")
+      nxTransUnApply = index(transformations, "transformUnApply")
+      nxTransValDef = index(transformations, "transformValDef")
+      nxTransDefDef = index(transformations, "transformDefDef")
+      nxTransTypeDef = index(transformations, "transformTypeDef")
+      nxTransTemplate = index(transformations, "transformTemplate")
+      nxTransPackageDef = index(transformations, "transformPackageDef")
+      nxTransStats = index(transformations, "transformStats")
+      nxTransUnit = index(transformations, "transformUnit")
+      nxTransOther = index(transformations, "transformOther")
+    }
+
+    /** Build plans from transform instances (delegates to the class-based
+     *  constructor on their runtime classes).
+     */
+    def this(transformations: Array[TreeTransform]) = {
+      this(transformations.map(_.getClass).asInstanceOf[Array[Class[_]]])
+    }
+
+    /** Incrementally update `prev`'s plans after the transform at
+     *  `transformationIndex` was replaced by `changedTransformation`.
+     *  With `reuse = true` the previous arrays may be mutated in place.
+     *  NOTE(review): unlike the full constructor, this update does not
+     *  refresh nxPrepUnit/nxTransUnit — presumably because the unit hooks
+     *  run once, before any mutation can occur; confirm.
+     */
+    def this(prev: NXTransformations, changedTransformation: TreeTransform, transformationIndex: Int, reuse: Boolean = false) = {
+      this()
+      val copy = !reuse
+      val changedTransformationClass = changedTransformation.getClass
+      nxPrepIdent = indexUpdate(prev.nxPrepIdent, changedTransformationClass, transformationIndex, "prepareForIdent", copy)
+      nxPrepSelect = indexUpdate(prev.nxPrepSelect, changedTransformationClass, transformationIndex, "prepareForSelect", copy)
+      nxPrepThis = indexUpdate(prev.nxPrepThis, changedTransformationClass, transformationIndex, "prepareForThis", copy)
+      nxPrepSuper = indexUpdate(prev.nxPrepSuper, changedTransformationClass, transformationIndex, "prepareForSuper", copy)
+      nxPrepApply = indexUpdate(prev.nxPrepApply, changedTransformationClass, transformationIndex, "prepareForApply", copy)
+      nxPrepTypeApply = indexUpdate(prev.nxPrepTypeApply, changedTransformationClass, transformationIndex, "prepareForTypeApply", copy)
+      nxPrepLiteral = indexUpdate(prev.nxPrepLiteral, changedTransformationClass, transformationIndex, "prepareForLiteral", copy)
+      nxPrepNew = indexUpdate(prev.nxPrepNew, changedTransformationClass, transformationIndex, "prepareForNew", copy)
+      nxPrepTyped = indexUpdate(prev.nxPrepTyped, changedTransformationClass, transformationIndex, "prepareForTyped", copy)
+      nxPrepAssign = indexUpdate(prev.nxPrepAssign, changedTransformationClass, transformationIndex, "prepareForAssign", copy)
+      nxPrepBlock = indexUpdate(prev.nxPrepBlock, changedTransformationClass, transformationIndex, "prepareForBlock", copy)
+      nxPrepIf = indexUpdate(prev.nxPrepIf, changedTransformationClass, transformationIndex, "prepareForIf", copy)
+      nxPrepClosure = indexUpdate(prev.nxPrepClosure, changedTransformationClass, transformationIndex, "prepareForClosure", copy)
+      nxPrepMatch = indexUpdate(prev.nxPrepMatch, changedTransformationClass, transformationIndex, "prepareForMatch", copy)
+      nxPrepCaseDef = indexUpdate(prev.nxPrepCaseDef, changedTransformationClass, transformationIndex, "prepareForCaseDef", copy)
+      nxPrepReturn = indexUpdate(prev.nxPrepReturn, changedTransformationClass, transformationIndex, "prepareForReturn", copy)
+      nxPrepTry = indexUpdate(prev.nxPrepTry, changedTransformationClass, transformationIndex, "prepareForTry", copy)
+      nxPrepSeqLiteral = indexUpdate(prev.nxPrepSeqLiteral, changedTransformationClass, transformationIndex, "prepareForSeqLiteral", copy)
+      nxPrepInlined = indexUpdate(prev.nxPrepInlined, changedTransformationClass, transformationIndex, "prepareForInlined", copy)
+      nxPrepTypeTree = indexUpdate(prev.nxPrepTypeTree, changedTransformationClass, transformationIndex, "prepareForTypeTree", copy)
+      nxPrepBind = indexUpdate(prev.nxPrepBind, changedTransformationClass, transformationIndex, "prepareForBind", copy)
+      nxPrepAlternative = indexUpdate(prev.nxPrepAlternative, changedTransformationClass, transformationIndex, "prepareForAlternative", copy)
+      nxPrepUnApply = indexUpdate(prev.nxPrepUnApply, changedTransformationClass, transformationIndex, "prepareForUnApply", copy)
+      nxPrepValDef = indexUpdate(prev.nxPrepValDef, changedTransformationClass, transformationIndex, "prepareForValDef", copy)
+      nxPrepDefDef = indexUpdate(prev.nxPrepDefDef, changedTransformationClass, transformationIndex, "prepareForDefDef", copy)
+      nxPrepTypeDef = indexUpdate(prev.nxPrepTypeDef, changedTransformationClass, transformationIndex, "prepareForTypeDef", copy)
+      nxPrepTemplate = indexUpdate(prev.nxPrepTemplate, changedTransformationClass, transformationIndex, "prepareForTemplate", copy)
+      nxPrepPackageDef = indexUpdate(prev.nxPrepPackageDef, changedTransformationClass, transformationIndex, "prepareForPackageDef", copy)
+      nxPrepStats = indexUpdate(prev.nxPrepStats, changedTransformationClass, transformationIndex, "prepareForStats", copy)
+
+      nxTransIdent = indexUpdate(prev.nxTransIdent, changedTransformationClass, transformationIndex, "transformIdent", copy)
+      nxTransSelect = indexUpdate(prev.nxTransSelect, changedTransformationClass, transformationIndex, "transformSelect", copy)
+      nxTransThis = indexUpdate(prev.nxTransThis, changedTransformationClass, transformationIndex, "transformThis", copy)
+      nxTransSuper = indexUpdate(prev.nxTransSuper, changedTransformationClass, transformationIndex, "transformSuper", copy)
+      nxTransApply = indexUpdate(prev.nxTransApply, changedTransformationClass, transformationIndex, "transformApply", copy)
+      nxTransTypeApply = indexUpdate(prev.nxTransTypeApply, changedTransformationClass, transformationIndex, "transformTypeApply", copy)
+      nxTransLiteral = indexUpdate(prev.nxTransLiteral, changedTransformationClass, transformationIndex, "transformLiteral", copy)
+      nxTransNew = indexUpdate(prev.nxTransNew, changedTransformationClass, transformationIndex, "transformNew", copy)
+      nxTransTyped = indexUpdate(prev.nxTransTyped, changedTransformationClass, transformationIndex, "transformTyped", copy)
+      nxTransAssign = indexUpdate(prev.nxTransAssign, changedTransformationClass, transformationIndex, "transformAssign", copy)
+      nxTransBlock = indexUpdate(prev.nxTransBlock, changedTransformationClass, transformationIndex, "transformBlock", copy)
+      nxTransIf = indexUpdate(prev.nxTransIf, changedTransformationClass, transformationIndex, "transformIf", copy)
+      nxTransClosure = indexUpdate(prev.nxTransClosure, changedTransformationClass, transformationIndex, "transformClosure", copy)
+      nxTransMatch = indexUpdate(prev.nxTransMatch, changedTransformationClass, transformationIndex, "transformMatch", copy)
+      nxTransCaseDef = indexUpdate(prev.nxTransCaseDef, changedTransformationClass, transformationIndex, "transformCaseDef", copy)
+      nxTransReturn = indexUpdate(prev.nxTransReturn, changedTransformationClass, transformationIndex, "transformReturn", copy)
+      nxTransTry = indexUpdate(prev.nxTransTry, changedTransformationClass, transformationIndex, "transformTry", copy)
+      nxTransSeqLiteral = indexUpdate(prev.nxTransSeqLiteral, changedTransformationClass, transformationIndex, "transformSeqLiteral", copy)
+      nxTransInlined = indexUpdate(prev.nxTransInlined, changedTransformationClass, transformationIndex, "transformInlined", copy)
+      nxTransTypeTree = indexUpdate(prev.nxTransTypeTree, changedTransformationClass, transformationIndex, "transformTypeTree", copy)
+      nxTransBind = indexUpdate(prev.nxTransBind, changedTransformationClass, transformationIndex, "transformBind", copy)
+      nxTransAlternative = indexUpdate(prev.nxTransAlternative, changedTransformationClass, transformationIndex, "transformAlternative", copy)
+      nxTransUnApply = indexUpdate(prev.nxTransUnApply, changedTransformationClass, transformationIndex, "transformUnApply", copy)
+      nxTransValDef = indexUpdate(prev.nxTransValDef, changedTransformationClass, transformationIndex, "transformValDef", copy)
+      nxTransDefDef = indexUpdate(prev.nxTransDefDef, changedTransformationClass, transformationIndex, "transformDefDef", copy)
+      nxTransTypeDef = indexUpdate(prev.nxTransTypeDef, changedTransformationClass, transformationIndex, "transformTypeDef", copy)
+      nxTransTemplate = indexUpdate(prev.nxTransTemplate, changedTransformationClass, transformationIndex, "transformTemplate", copy)
+      nxTransPackageDef = indexUpdate(prev.nxTransPackageDef, changedTransformationClass, transformationIndex, "transformPackageDef", copy)
+      nxTransStats = indexUpdate(prev.nxTransStats, changedTransformationClass, transformationIndex, "transformStats", copy)
+      nxTransOther = indexUpdate(prev.nxTransOther, changedTransformationClass, transformationIndex, "transformOther", copy)
+    }
+
+    /** Those arrays are used as "execution plan" in order to only execute non-trivial transformations\preparations
+     *  for every integer i array(i) contains first non trivial transformation\preparation on particular tree subtype.
+     *  If no nontrivial transformation are left stored value is greater than transformers.size
+     */
+    var nxPrepIdent: Array[Int] = _
+    var nxPrepSelect: Array[Int] = _
+    var nxPrepThis: Array[Int] = _
+    var nxPrepSuper: Array[Int] = _
+    var nxPrepApply: Array[Int] = _
+    var nxPrepTypeApply: Array[Int] = _
+    var nxPrepLiteral: Array[Int] = _
+    var nxPrepNew: Array[Int] = _
+    var nxPrepTyped: Array[Int] = _
+    var nxPrepAssign: Array[Int] = _
+    var nxPrepBlock: Array[Int] = _
+    var nxPrepIf: Array[Int] = _
+    var nxPrepClosure: Array[Int] = _
+    var nxPrepMatch: Array[Int] = _
+    var nxPrepCaseDef: Array[Int] = _
+    var nxPrepReturn: Array[Int] = _
+    var nxPrepTry: Array[Int] = _
+    var nxPrepSeqLiteral: Array[Int] = _
+    var nxPrepInlined: Array[Int] = _
+    var nxPrepTypeTree: Array[Int] = _
+    var nxPrepBind: Array[Int] = _
+    var nxPrepAlternative: Array[Int] = _
+    var nxPrepUnApply: Array[Int] = _
+    var nxPrepValDef: Array[Int] = _
+    var nxPrepDefDef: Array[Int] = _
+    var nxPrepTypeDef: Array[Int] = _
+    var nxPrepTemplate: Array[Int] = _
+    var nxPrepPackageDef: Array[Int] = _
+    var nxPrepStats: Array[Int] = _
+    var nxPrepUnit: Array[Int] = _
+
+    var nxTransIdent: Array[Int] = _
+    var nxTransSelect: Array[Int] = _
+    var nxTransThis: Array[Int] = _
+    var nxTransSuper: Array[Int] = _
+    var nxTransApply: Array[Int] = _
+    var nxTransTypeApply: Array[Int] = _
+    var nxTransLiteral: Array[Int] = _
+    var nxTransNew: Array[Int] = _
+    var nxTransTyped: Array[Int] = _
+    var nxTransAssign: Array[Int] = _
+    var nxTransBlock: Array[Int] = _
+    var nxTransIf: Array[Int] = _
+    var nxTransClosure: Array[Int] = _
+    var nxTransMatch: Array[Int] = _
+    var nxTransCaseDef: Array[Int] = _
+    var nxTransReturn: Array[Int] = _
+    var nxTransTry: Array[Int] = _
+    var nxTransSeqLiteral: Array[Int] = _
+    var nxTransInlined: Array[Int] = _
+    var nxTransTypeTree: Array[Int] = _
+    var nxTransBind: Array[Int] = _
+    var nxTransAlternative: Array[Int] = _
+    var nxTransUnApply: Array[Int] = _
+    var nxTransValDef: Array[Int] = _
+    var nxTransDefDef: Array[Int] = _
+    var nxTransTypeDef: Array[Int] = _
+    var nxTransTemplate: Array[Int] = _
+    var nxTransPackageDef: Array[Int] = _
+    var nxTransStats: Array[Int] = _
+    var nxTransUnit: Array[Int] = _
+    var nxTransOther: Array[Int] = _
+  }
+
+ /** A group of tree transforms that are applied in sequence during the same phase */
+ abstract class TreeTransformer extends Phase {
+
+ def miniPhases: Array[MiniPhase]
+
+ override def run(implicit ctx: Context): Unit = {
+ val curTree = ctx.compilationUnit.tpdTree
+ val newTree = macroTransform(curTree)
+ ctx.compilationUnit.tpdTree = newTree
+ }
+
+ def mutateTransformers[T](info: TransformerInfo, mutator: Mutator[T], mutationPlan: Array[Int], tree: T, cur: Int)(implicit ctx: Context) = {
+ var transformersCopied = false
+ var nxCopied = false
+ var result = info.transformers
+ var resultNX = info.nx
+ var i = mutationPlan(cur)
+ // @DarkDimius You commented on the previous version
+ //
+ // var i = mutationPlan(0) // if TreeTransform.transform() method didn't exist we could have used mutationPlan(cur)
+ //
+ // But we need to use `cur` or otherwise we call prepare actions preceding the
+ // phase that issued a transformFollowing. This can lead to "denotation not defined
+ // here" errors. Note that tests still pass with the current modified code.
+ val l = result.length
+ var allDone = i < l
+ while (i < l) {
+ val oldTransform = result(i)
+ val newTransform = mutator(oldTransform, tree, ctx.withPhase(oldTransform.treeTransformPhase))
+ allDone = allDone && (newTransform eq NoTransform)
+ if (!(oldTransform eq newTransform)) {
+ if (!transformersCopied) result = result.clone()
+ transformersCopied = true
+ result(i) = newTransform
+ if (!(newTransform.getClass == oldTransform.getClass)) {
+ resultNX = new NXTransformations(resultNX, newTransform, i, nxCopied)
+ nxCopied = true
+ }
+ }
+ i = mutationPlan(i + 1)
+ }
+ if (allDone) null
+ else if (!transformersCopied) info
+ else new TransformerInfo(result, resultNX, info.group)
+ }
+
+ // Adapters exposing each TreeTransform's prepareForX hook through the common
+ // Mutator[T] function shape, so mutateTransformers can drive every prepare
+ // step uniformly regardless of the tree kind being visited.
+ val prepForIdent: Mutator[Ident] = (trans, tree, ctx) => trans.prepareForIdent(tree)(ctx)
+ val prepForSelect: Mutator[Select] = (trans, tree, ctx) => trans.prepareForSelect(tree)(ctx)
+ val prepForThis: Mutator[This] = (trans, tree, ctx) => trans.prepareForThis(tree)(ctx)
+ val prepForSuper: Mutator[Super] = (trans, tree, ctx) => trans.prepareForSuper(tree)(ctx)
+ val prepForApply: Mutator[Apply] = (trans, tree, ctx) => trans.prepareForApply(tree)(ctx)
+ val prepForTypeApply: Mutator[TypeApply] = (trans, tree, ctx) => trans.prepareForTypeApply(tree)(ctx)
+ val prepForNew: Mutator[New] = (trans, tree, ctx) => trans.prepareForNew(tree)(ctx)
+ val prepForTyped: Mutator[Typed] = (trans, tree, ctx) => trans.prepareForTyped(tree)(ctx)
+ val prepForAssign: Mutator[Assign] = (trans, tree, ctx) => trans.prepareForAssign(tree)(ctx)
+ val prepForLiteral: Mutator[Literal] = (trans, tree, ctx) => trans.prepareForLiteral(tree)(ctx)
+ val prepForBlock: Mutator[Block] = (trans, tree, ctx) => trans.prepareForBlock(tree)(ctx)
+ val prepForIf: Mutator[If] = (trans, tree, ctx) => trans.prepareForIf(tree)(ctx)
+ val prepForClosure: Mutator[Closure] = (trans, tree, ctx) => trans.prepareForClosure(tree)(ctx)
+ val prepForMatch: Mutator[Match] = (trans, tree, ctx) => trans.prepareForMatch(tree)(ctx)
+ val prepForCaseDef: Mutator[CaseDef] = (trans, tree, ctx) => trans.prepareForCaseDef(tree)(ctx)
+ val prepForReturn: Mutator[Return] = (trans, tree, ctx) => trans.prepareForReturn(tree)(ctx)
+ val prepForTry: Mutator[Try] = (trans, tree, ctx) => trans.prepareForTry(tree)(ctx)
+ val prepForSeqLiteral: Mutator[SeqLiteral] = (trans, tree, ctx) => trans.prepareForSeqLiteral(tree)(ctx)
+ val prepForInlined: Mutator[Inlined] = (trans, tree, ctx) => trans.prepareForInlined(tree)(ctx)
+ val prepForTypeTree: Mutator[TypeTree] = (trans, tree, ctx) => trans.prepareForTypeTree(tree)(ctx)
+ val prepForBind: Mutator[Bind] = (trans, tree, ctx) => trans.prepareForBind(tree)(ctx)
+ val prepForAlternative: Mutator[Alternative] = (trans, tree, ctx) => trans.prepareForAlternative(tree)(ctx)
+ val prepForUnApply: Mutator[UnApply] = (trans, tree, ctx) => trans.prepareForUnApply(tree)(ctx)
+ val prepForValDef: Mutator[ValDef] = (trans, tree, ctx) => trans.prepareForValDef(tree)(ctx)
+ val prepForDefDef: Mutator[DefDef] = (trans, tree, ctx) => trans.prepareForDefDef(tree)(ctx)
+ val prepForTypeDef: Mutator[TypeDef] = (trans, tree, ctx) => trans.prepareForTypeDef(tree)(ctx)
+ val prepForTemplate: Mutator[Template] = (trans, tree, ctx) => trans.prepareForTemplate(tree)(ctx)
+ val prepForPackageDef: Mutator[PackageDef] = (trans, tree, ctx) => trans.prepareForPackageDef(tree)(ctx)
+ // Stats and Unit operate on a statement list / whole unit rather than a node type.
+ val prepForStats: Mutator[List[Tree]] = (trans, trees, ctx) => trans.prepareForStats(trees)(ctx)
+ val prepForUnit: Mutator[Tree] = (trans, tree, ctx) => trans.prepareForUnit(tree)(ctx)
+
+ // Assign each mini phase its slot index once (as a side effect of the map),
+ // then cache the fused transform pipeline together with its precomputed
+ // "next transformer that overrides hook X" tables (NXTransformations).
+ val initialTransformationsCache = miniPhases.zipWithIndex.map {
+ case (miniPhase, id) =>
+ miniPhase.idx = id
+ miniPhase.treeTransform
+ }
+
+ // Immutable starting TransformerInfo shared by every compilation unit.
+ val initialInfoCache = new TransformerInfo(initialTransformationsCache, new NXTransformations(initialTransformationsCache), this)
+
+ // Entry point: run the whole fused mini-phase pipeline over a unit's tree.
+ // A null result from mutateTransformers means a prepare hook cancelled the
+ // pipeline for this unit, in which case the tree is returned unchanged.
+ def macroTransform(t: Tree)(implicit ctx: Context): Tree = {
+ val info = initialInfoCache
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForUnit, info.nx.nxPrepUnit, t, 0)
+ if (mutatedInfo eq null) t
+ else goUnit(transform(t, mutatedInfo, 0), mutatedInfo.nx.nxTransUnit(0))
+ }
+
+ // The goX methods run the transformX hook of each remaining transformer in
+ // index order. While the hook keeps returning the same node kind we loop
+ // tail-recursively, jumping via info.nx.nxTransX to the next transformer
+ // that actually overrides the hook; as soon as a hook returns a different
+ // node kind we fall back to transformSingle for a full re-dispatch.
+ @tailrec
+ final private[TreeTransforms] def goIdent(tree: Ident, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformIdent(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Ident => goIdent(t, info.nx.nxTransIdent(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goSelect(tree: Select, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformSelect(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Select => goSelect(t, info.nx.nxTransSelect(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goThis(tree: This, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformThis(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: This => goThis(t, info.nx.nxTransThis(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goSuper(tree: Super, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformSuper(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Super => goSuper(t, info.nx.nxTransSuper(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goApply(tree: Apply, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformApply(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Apply => goApply(t, info.nx.nxTransApply(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goTypeApply(tree: TypeApply, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformTypeApply(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: TypeApply => goTypeApply(t, info.nx.nxTransTypeApply(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goNew(tree: New, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformNew(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: New => goNew(t, info.nx.nxTransNew(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goTyped(tree: Typed, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformTyped(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Typed => goTyped(t, info.nx.nxTransTyped(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goAssign(tree: Assign, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformAssign(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Assign => goAssign(t, info.nx.nxTransAssign(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goLiteral(tree: Literal, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformLiteral(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Literal => goLiteral(t, info.nx.nxTransLiteral(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goBlock(tree: Block, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformBlock(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Block => goBlock(t, info.nx.nxTransBlock(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goIf(tree: If, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformIf(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: If => goIf(t, info.nx.nxTransIf(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goClosure(tree: Closure, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformClosure(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Closure => goClosure(t, info.nx.nxTransClosure(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goMatch(tree: Match, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformMatch(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Match => goMatch(t, info.nx.nxTransMatch(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goCaseDef(tree: CaseDef, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformCaseDef(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: CaseDef => goCaseDef(t, info.nx.nxTransCaseDef(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goReturn(tree: Return, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformReturn(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Return => goReturn(t, info.nx.nxTransReturn(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ // Remaining per-node-kind transform loops: same protocol as above — run each
+ // overriding transformer's hook at its own phase, stay in the loop while the
+ // node kind is preserved, otherwise re-dispatch through transformSingle.
+ // goUnit is simpler: transformUnit's result is fed straight to the next
+ // transformer without a shape check.
+ @tailrec
+ final private[TreeTransforms] def goTry(tree: Try, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformTry(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Try => goTry(t, info.nx.nxTransTry(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goSeqLiteral(tree: SeqLiteral, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformSeqLiteral(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: SeqLiteral => goSeqLiteral(t, info.nx.nxTransSeqLiteral(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goInlined(tree: Inlined, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformInlined(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Inlined => goInlined(t, info.nx.nxTransInlined(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goTypeTree(tree: TypeTree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformTypeTree(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: TypeTree => goTypeTree(t, info.nx.nxTransTypeTree(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goBind(tree: Bind, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformBind(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Bind => goBind(t, info.nx.nxTransBind(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goAlternative(tree: Alternative, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformAlternative(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Alternative => goAlternative(t, info.nx.nxTransAlternative(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goValDef(tree: ValDef, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformValDef(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: ValDef => goValDef(t, info.nx.nxTransValDef(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goDefDef(tree: DefDef, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformDefDef(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: DefDef => goDefDef(t, info.nx.nxTransDefDef(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goUnApply(tree: UnApply, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformUnApply(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: UnApply => goUnApply(t, info.nx.nxTransUnApply(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goTypeDef(tree: TypeDef, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformTypeDef(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: TypeDef => goTypeDef(t, info.nx.nxTransTypeDef(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goTemplate(tree: Template, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformTemplate(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Template => goTemplate(t, info.nx.nxTransTemplate(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goPackageDef(tree: PackageDef, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ trans.transformPackageDef(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: PackageDef => goPackageDef(t, info.nx.nxTransPackageDef(cur + 1))
+ case t => transformSingle(t, cur + 1)
+ }
+ } else tree
+ }
+
+ @tailrec
+ final private[TreeTransforms] def goUnit(tree: Tree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ val t = trans.transformUnit(tree)(ctx.withPhase(trans.treeTransformPhase), info)
+ goUnit(t, info.nx.nxTransUnit(cur + 1))
+ } else tree
+ }
+
+ // Catch-all hook for tree kinds without a dedicated goX loop: apply the
+ // current transformer's transformOther at its own phase, then re-dispatch
+ // the (possibly reshaped) result through transformSingle.
+ final private[TreeTransforms] def goOther(tree: Tree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree =
+ if (cur >= info.transformers.length) tree
+ else {
+ val trans = info.transformers(cur)
+ transformSingle(trans.transformOther(tree)(ctx.withPhase(trans.treeTransformPhase), info), cur + 1)
+ }
+
+ // Re-dispatch a named tree at transformer index `cur`, entering the matching
+ // goX loop at the first transformer that actually overrides the hook (the
+ // info.nx.nxTransX tables skip the ones that don't).
+ final private[TreeTransforms] def goNamed(tree: NameTree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree =
+ tree match {
+ case tree: Ident => goIdent(tree, info.nx.nxTransIdent(cur))
+ case tree: Select => goSelect(tree, info.nx.nxTransSelect(cur))
+ // Consistency fix: was `goBind(tree, cur)`, which entered the loop at
+ // `cur` even when that transformer has no transformBind override. Using
+ // the nx table matches every sibling case; the result is unchanged since
+ // the default transformBind is the identity, but needless calls are skipped.
+ case tree: Bind => goBind(tree, info.nx.nxTransBind(cur))
+ case tree: ValDef if !tree.isEmpty => goValDef(tree, info.nx.nxTransValDef(cur))
+ case tree: DefDef => goDefDef(tree, info.nx.nxTransDefDef(cur))
+ case tree: TypeDef => goTypeDef(tree, info.nx.nxTransTypeDef(cur))
+ case _ => tree
+ }
+
+ // Re-dispatch an unnamed tree at transformer index `cur`: pick the goX loop
+ // for the node's kind, entering at the first transformer that overrides the
+ // corresponding hook. Trees with no dedicated loop go through goOther.
+ final private[TreeTransforms] def goUnnamed(tree: Tree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree =
+ tree match {
+ case tree: This => goThis(tree, info.nx.nxTransThis(cur))
+ case tree: Super => goSuper(tree, info.nx.nxTransSuper(cur))
+ case tree: Apply => goApply(tree, info.nx.nxTransApply(cur))
+ case tree: TypeApply => goTypeApply(tree, info.nx.nxTransTypeApply(cur))
+ case tree: Literal => goLiteral(tree, info.nx.nxTransLiteral(cur))
+ case tree: New => goNew(tree, info.nx.nxTransNew(cur))
+ case tree: Typed => goTyped(tree, info.nx.nxTransTyped(cur))
+ case tree: Assign => goAssign(tree, info.nx.nxTransAssign(cur))
+ case tree: Block => goBlock(tree, info.nx.nxTransBlock(cur))
+ case tree: If => goIf(tree, info.nx.nxTransIf(cur))
+ case tree: Closure => goClosure(tree, info.nx.nxTransClosure(cur))
+ case tree: Match => goMatch(tree, info.nx.nxTransMatch(cur))
+ case tree: CaseDef => goCaseDef(tree, info.nx.nxTransCaseDef(cur))
+ case tree: Return => goReturn(tree, info.nx.nxTransReturn(cur))
+ case tree: Try => goTry(tree, info.nx.nxTransTry(cur))
+ case tree: SeqLiteral => goSeqLiteral(tree, info.nx.nxTransSeqLiteral(cur))
+ case tree: Inlined => goInlined(tree, info.nx.nxTransInlined(cur))
+ case tree: TypeTree => goTypeTree(tree, info.nx.nxTransTypeTree(cur))
+ case tree: Alternative => goAlternative(tree, info.nx.nxTransAlternative(cur))
+ case tree: UnApply => goUnApply(tree, info.nx.nxTransUnApply(cur))
+ case tree: Template => goTemplate(tree, info.nx.nxTransTemplate(cur))
+ case tree: PackageDef => goPackageDef(tree, info.nx.nxTransPackageDef(cur))
+ // Thickets are flattened by the callers (transformUnnamed/transformTrees); nothing to run here.
+ case Thicket(trees) => tree
+ case tree => goOther(tree, info.nx.nxTransOther(cur))
+ }
+
+ // Dispatch a single tree to the remaining transformers, splitting on whether
+ // the node carries a name (mirrors the transformNamed/transformUnnamed split).
+ final private[TreeTransforms] def transformSingle(tree: Tree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree =
+ if (cur >= info.transformers.length) tree
+ else tree match {
+ // one big match split into two smaller ones
+ case named: NameTree => goNamed(named, cur)
+ case unnamed => goUnnamed(unnamed, cur)
+ }
+
+ // TODO merge with localCtx in MacroTransform
+ // Generally: If we will keep MacroTransform, merge common behavior with TreeTransform
+ /** A fresh context owned by `sym` — or by its module class when `sym` is a package value. */
+ def localContext(sym: Symbol)(implicit ctx: Context) =
+ ctx.fresh.setOwner(if (sym is PackageVal) sym.moduleClass else sym)
+
+ // Prepare-then-transform for named trees. Each case first lets interested
+ // transformers mutate the pipeline via their prepareForX hook (a null
+ // mutatedInfo aborts, returning the tree unchanged), then transforms the
+ // children under the (possibly narrowed) context, and finally runs the goX
+ // loop on the copied node. Definitions transform their bodies under a
+ // context owned by the defined symbol (localContext).
+ final private[TreeTransforms] def transformNamed(tree: NameTree, info: TransformerInfo, cur: Int)(implicit ctx: Context): Tree =
+ tree match {
+ case tree: Ident =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForIdent, info.nx.nxPrepIdent, tree, cur)
+ // Dotty deviation: implicits need explicit type
+ if (mutatedInfo eq null) tree
+ else goIdent(tree, mutatedInfo.nx.nxTransIdent(cur))
+ case tree: Select =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForSelect, info.nx.nxPrepSelect, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val qual = transform(tree.qualifier, mutatedInfo, cur)
+ goSelect(cpy.Select(tree)(qual, tree.name), mutatedInfo.nx.nxTransSelect(cur))
+ }
+ case tree: Bind =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForBind, info.nx.nxPrepBind, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val body = transform(tree.body, mutatedInfo, cur)
+ goBind(cpy.Bind(tree)(tree.name, body), mutatedInfo.nx.nxTransBind(cur))
+ }
+ case tree: ValDef if !tree.isEmpty => // As a result of discussing with Martin: emptyValDefs shouldn't be copied // NAME
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForValDef, info.nx.nxPrepValDef, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ // symbol may not exist for synthetic/unbound ValDefs; fall back to the outer context
+ val nestedCtx = if (tree.symbol.exists) localContext(tree.symbol) else ctx
+ val tpt = transform(tree.tpt, mutatedInfo, cur)(nestedCtx)
+ val rhs = transform(tree.rhs, mutatedInfo, cur)(nestedCtx)
+ goValDef(cpy.ValDef(tree)(tree.name, tpt, rhs), mutatedInfo.nx.nxTransValDef(cur))
+ }
+ case tree: DefDef =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForDefDef, info.nx.nxPrepDefDef, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val nestedCtx = localContext(tree.symbol)
+ val tparams = transformSubTrees(tree.tparams, mutatedInfo, cur)(nestedCtx)
+ val vparams = tree.vparamss.mapConserve(x => transformSubTrees(x, mutatedInfo, cur)(nestedCtx))
+ val tpt = transform(tree.tpt, mutatedInfo, cur)(nestedCtx)
+ val rhs = transform(tree.rhs, mutatedInfo, cur)(nestedCtx)
+ goDefDef(cpy.DefDef(tree)(tree.name, tparams, vparams, tpt, rhs), mutatedInfo.nx.nxTransDefDef(cur))
+ }
+ case tree: TypeDef =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTypeDef, info.nx.nxPrepTypeDef, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val rhs = transform(tree.rhs, mutatedInfo, cur)(localContext(tree.symbol))
+ goTypeDef(cpy.TypeDef(tree)(tree.name, rhs), mutatedInfo.nx.nxTransTypeDef(cur))
+ }
+ case _ =>
+ tree
+ }
+
+ // Prepare-then-transform for unnamed trees: run the prepareForX hooks (null
+ // mutatedInfo aborts), transform children — under adjusted contexts where
+ // required (Pattern mode for case patterns, superCallContext for parents,
+ // inlineContext for inlined expansions, exprContext/localContext for stats)
+ // — then enter the goX loop on the copied node.
+ final private[TreeTransforms] def transformUnnamed(tree: Tree, info: TransformerInfo, cur: Int)(implicit ctx: Context): Tree =
+ tree match {
+ case tree: This =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForThis, info.nx.nxPrepThis, tree, cur)
+ if (mutatedInfo eq null) tree
+ else goThis(tree, mutatedInfo.nx.nxTransThis(cur))
+ case tree: Super =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForSuper, info.nx.nxPrepSuper, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val qual = transform(tree.qual, mutatedInfo, cur)
+ goSuper(cpy.Super(tree)(qual, tree.mix), mutatedInfo.nx.nxTransSuper(cur))
+ }
+ case tree: Apply =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForApply, info.nx.nxPrepApply, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val fun = transform(tree.fun, mutatedInfo, cur)
+ val args = transformSubTrees(tree.args, mutatedInfo, cur)
+ goApply(cpy.Apply(tree)(fun, args), mutatedInfo.nx.nxTransApply(cur))
+ }
+ case tree: TypeApply =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTypeApply, info.nx.nxPrepTypeApply, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val fun = transform(tree.fun, mutatedInfo, cur)
+ val args = transformTrees(tree.args, mutatedInfo, cur)
+ goTypeApply(cpy.TypeApply(tree)(fun, args), mutatedInfo.nx.nxTransTypeApply(cur))
+ }
+ case tree: Literal =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForLiteral, info.nx.nxPrepLiteral, tree, cur)
+ if (mutatedInfo eq null) tree
+ else goLiteral(tree, mutatedInfo.nx.nxTransLiteral(cur))
+ case tree: New =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForNew, info.nx.nxPrepNew, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val tpt = transform(tree.tpt, mutatedInfo, cur)
+ goNew(cpy.New(tree)(tpt), mutatedInfo.nx.nxTransNew(cur))
+ }
+ case tree: Typed =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTyped, info.nx.nxPrepTyped, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val expr = transform(tree.expr, mutatedInfo, cur)
+ val tpt = transform(tree.tpt, mutatedInfo, cur)
+ goTyped(cpy.Typed(tree)(expr, tpt), mutatedInfo.nx.nxTransTyped(cur))
+ }
+ case tree: Assign =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForAssign, info.nx.nxPrepAssign, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val lhs = transform(tree.lhs, mutatedInfo, cur)
+ val rhs = transform(tree.rhs, mutatedInfo, cur)
+ goAssign(cpy.Assign(tree)(lhs, rhs), mutatedInfo.nx.nxTransAssign(cur))
+ }
+ case tree: Block =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForBlock, info.nx.nxPrepBlock, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val stats = transformStats(tree.stats, ctx.owner, mutatedInfo, cur)
+ val expr = transform(tree.expr, mutatedInfo, cur)
+ goBlock(cpy.Block(tree)(stats, expr), mutatedInfo.nx.nxTransBlock(cur))
+ }
+ case tree: If =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForIf, info.nx.nxPrepIf, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val cond = transform(tree.cond, mutatedInfo, cur)
+ val thenp = transform(tree.thenp, mutatedInfo, cur)
+ val elsep = transform(tree.elsep, mutatedInfo, cur)
+ goIf(cpy.If(tree)(cond, thenp, elsep), mutatedInfo.nx.nxTransIf(cur))
+ }
+ case tree: Closure =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForClosure, info.nx.nxPrepClosure, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val env = transformTrees(tree.env, mutatedInfo, cur)
+ val meth = transform(tree.meth, mutatedInfo, cur)
+ val tpt = transform(tree.tpt, mutatedInfo, cur)
+ goClosure(cpy.Closure(tree)(env, meth, tpt), mutatedInfo.nx.nxTransClosure(cur))
+ }
+ case tree: Match =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForMatch, info.nx.nxPrepMatch, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val selector = transform(tree.selector, mutatedInfo, cur)
+ val cases = transformSubTrees(tree.cases, mutatedInfo, cur)
+ goMatch(cpy.Match(tree)(selector, cases), mutatedInfo.nx.nxTransMatch(cur))
+ }
+ case tree: CaseDef =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForCaseDef, info.nx.nxPrepCaseDef, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ // patterns are transformed in Pattern mode; guard and body are ordinary expressions
+ val pat = transform(tree.pat, mutatedInfo, cur)(ctx.addMode(Mode.Pattern))
+ val guard = transform(tree.guard, mutatedInfo, cur)
+ val body = transform(tree.body, mutatedInfo, cur)
+ goCaseDef(cpy.CaseDef(tree)(pat, guard, body), mutatedInfo.nx.nxTransCaseDef(cur))
+ }
+ case tree: Return =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForReturn, info.nx.nxPrepReturn, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val expr = transform(tree.expr, mutatedInfo, cur)
+ val from = tree.from
+ // don't transform the `from` part, as this is not a normal ident, but
+ // a pointer to the enclosing method. Transforming this as a normal ident
+ // can go wrong easily. If a transformation is needed, it should be
+ // the responsibility of the transformReturn method to handle this also.
+ goReturn(cpy.Return(tree)(expr, from), mutatedInfo.nx.nxTransReturn(cur))
+ }
+ case tree: Try =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTry, info.nx.nxPrepTry, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val block = transform(tree.expr, mutatedInfo, cur)
+ val cases1 = tree.cases.mapConserve(transform(_, mutatedInfo, cur)).asInstanceOf[List[CaseDef]]
+ val finalizer = transform(tree.finalizer, mutatedInfo, cur)
+ goTry(cpy.Try(tree)(block, cases1, finalizer), mutatedInfo.nx.nxTransTry(cur))
+ }
+ case tree: SeqLiteral =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForSeqLiteral, info.nx.nxPrepSeqLiteral, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val elems = transformTrees(tree.elems, mutatedInfo, cur)
+ val elemtpt = transform(tree.elemtpt, mutatedInfo, cur)
+ goSeqLiteral(cpy.SeqLiteral(tree)(elems, elemtpt), mutatedInfo.nx.nxTransSeqLiteral(cur))
+ }
+ case tree: Inlined =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForInlined, info.nx.nxPrepInlined, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val bindings = transformSubTrees(tree.bindings, mutatedInfo, cur)
+ val expansion = transform(tree.expansion, mutatedInfo, cur)(inlineContext(tree))
+ goInlined(cpy.Inlined(tree)(tree.call, bindings, expansion), mutatedInfo.nx.nxTransInlined(cur))
+ }
+ case tree: TypeTree =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTypeTree, info.nx.nxPrepTypeTree, tree, cur)
+ if (mutatedInfo eq null) tree
+ else goTypeTree(tree, mutatedInfo.nx.nxTransTypeTree(cur))
+ case tree: Alternative =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForAlternative, info.nx.nxPrepAlternative, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val trees = transformTrees(tree.trees, mutatedInfo, cur)
+ goAlternative(cpy.Alternative(tree)(trees), mutatedInfo.nx.nxTransAlternative(cur))
+ }
+ case tree: UnApply =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForUnApply, info.nx.nxPrepUnApply, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val fun = transform(tree.fun, mutatedInfo, cur)
+ val implicits = transformTrees(tree.implicits, mutatedInfo, cur)
+ val patterns = transformTrees(tree.patterns, mutatedInfo, cur)
+ goUnApply(cpy.UnApply(tree)(fun, implicits, patterns), mutatedInfo.nx.nxTransUnApply(cur))
+ }
+ case tree: Template =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTemplate, info.nx.nxPrepTemplate, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val constr = transformSub(tree.constr, mutatedInfo, cur)
+ val parents = transformTrees(tree.parents, mutatedInfo, cur)(ctx.superCallContext)
+ val self = transformSub(tree.self, mutatedInfo, cur)
+ val body = transformStats(tree.body, tree.symbol, mutatedInfo, cur)
+ goTemplate(cpy.Template(tree)(constr, parents, self, body), mutatedInfo.nx.nxTransTemplate(cur))
+ }
+ case tree: PackageDef =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForPackageDef, info.nx.nxPrepPackageDef, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val nestedCtx = localContext(tree.symbol)
+ val pid = transformSub(tree.pid, mutatedInfo, cur)
+ val stats = transformStats(tree.stats, tree.symbol, mutatedInfo, cur)(nestedCtx)
+ goPackageDef(cpy.PackageDef(tree)(pid, stats), mutatedInfo.nx.nxTransPackageDef(cur))
+ }
+ case Thicket(trees) =>
+ cpy.Thicket(tree)(transformTrees(trees, info, cur))
+ case tree =>
+ implicit val originalInfo: TransformerInfo = info
+ goOther(tree, info.nx.nxTransOther(cur))
+ }
+
+ // Last tree reported in a crash diagnostic; prevents printing the same tree
+ // repeatedly as the exception unwinds through nested transform calls.
+ private var crashingTree: Tree = EmptyTree
+
+ // Transform a single tree starting at transformer index `cur`, dispatching
+ // to transformNamed/transformUnnamed under the current transformer's phase.
+ // On a non-fatal failure, the offending tree is printed once, then rethrown.
+ def transform(tree: Tree, info: TransformerInfo, cur: Int)(implicit ctx: Context): Tree = ctx.traceIndented(s"transforming ${tree.show} at ${ctx.phase}", transforms, show = true) {
+ try
+ if (cur < info.transformers.length) {
+ // if cur > 0 then some of the symbols can be created by already performed transformations
+ // this means that their denotations could not exists in previous period
+ val pctx = ctx.withPhase(info.transformers(cur).treeTransformPhase)
+ tree match {
+ //split one big match into 2 smaller ones
+ case tree: NameTree => transformNamed(tree, info, cur)(pctx)
+ case tree => transformUnnamed(tree, info, cur)(pctx)
+ }
+ } else tree
+ catch {
+ case NonFatal(ex) =>
+ if (tree ne crashingTree) {
+ crashingTree = tree
+ println(i"exception while transforming $tree of class ${tree.getClass} # ${tree.uniqueId}")
+ }
+ throw ex
+ }
+ }
+
+ // Run each remaining transformer's transformStats hook over a statement list,
+ // skipping (via nxTransStats) transformers that don't override it.
+ @tailrec
+ final private[TreeTransforms] def goStats(trees: List[Tree], cur: Int)(implicit ctx: Context, info: TransformerInfo): List[Tree] = {
+ if (cur < info.transformers.length) {
+ val trans = info.transformers(cur)
+ val stats = trans.transformStats(trees)(ctx.withPhase(trans.treeTransformPhase), info)
+ goStats(stats, info.nx.nxTransStats(cur + 1))
+ } else trees
+ }
+
+ // Transform a statement list: prepare via prepForStats, transform each
+ // statement (plain expressions under an exprContext owned by `exprOwner`;
+ // imports and definitions under the current context), flatten any Thickets,
+ // then run the transformStats hooks via goStats.
+ def transformStats(trees: List[Tree], exprOwner: Symbol, info: TransformerInfo, current: Int)(implicit ctx: Context): List[Tree] = {
+ val newInfo = mutateTransformers(info, prepForStats, info.nx.nxPrepStats, trees, current)
+ def transformStat(stat: Tree): Tree = stat match {
+ case _: Import | _: DefTree => transform(stat, newInfo, current)
+ case Thicket(stats) => cpy.Thicket(stat)(stats mapConserve transformStat)
+ case _ => transform(stat, newInfo, current)(ctx.exprContext(stat, exprOwner))
+ }
+ val newTrees = flatten(trees.mapconserve(transformStat))
+ goStats(newTrees, newInfo.nx.nxTransStats(current))(ctx, newInfo)
+ }
+
+ // Transform a list of trees, flattening any Thickets produced.
+ def transformTrees(trees: List[Tree], info: TransformerInfo, current: Int)(implicit ctx: Context): List[Tree] =
+ flatten(trees mapConserve (x => transform(x, info, current)))
+
+ // Transform a subtree whose static type must be preserved (cast after transform).
+ def transformSub[Tr <: Tree](tree: Tr, info: TransformerInfo, current: Int)(implicit ctx: Context): Tr =
+ transform(tree, info, current).asInstanceOf[Tr]
+
+ // List variant of transformSub.
+ def transformSubTrees[Tr <: Tree](trees: List[Tr], info: TransformerInfo, current: Int)(implicit ctx: Context): List[Tr] =
+ transformTrees(trees, info, current)(ctx).asInstanceOf[List[Tr]]
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
new file mode 100644
index 000000000..9a6ecef51
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
@@ -0,0 +1,99 @@
+package dotty.tools.dotc
+package transform
+
+import core.Symbols._
+import core.StdNames._
+import ast.Trees._
+import core.Types._
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Flags
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+import dotty.tools.dotc.util.Positions.Position
+
+/** Compiles the cases that can not be handled by primitive catch cases as a common pattern match.
+ *
+ * The following code:
+ * ```
+ * try { <code> }
+ * catch {
+ * <tryCases> // Cases that can be handled by catch
+ * <patternMatchCases> // Cases starting with first one that can't be handled by catch
+ * }
+ * ```
+ * will become:
+ * ```
+ * try { <code> }
+ * catch {
+ * <tryCases>
+ * case e => e match {
+ * <patternMatchCases>
+ * }
+ * }
+ * ```
+ *
+ * Cases that are not supported include:
+ * - Applies and unapplies
+ * - Idents
+ * - Alternatives
+ * - `case _: T =>` where `T` is not `Throwable`
+ *
+ */
+class TryCatchPatterns extends MiniPhaseTransform {
+ import dotty.tools.dotc.ast.tpd._
+
+ def phaseName: String = "tryCatchPatterns"
+
+ override def runsAfter = Set(classOf[ElimRepeated])
+
+ /** After this phase every case of a Try must be a guard-free Typed or Bind
+  * pattern, or a default case; everything else was moved into the fallback
+  * pattern match by `transformTry`.
+  */
+ override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case Try(_, cases, _) =>
+ cases.foreach {
+ case CaseDef(Typed(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.")
+ case CaseDef(Bind(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.")
+ case c =>
+ assert(isDefaultCase(c), "Pattern in Try should be Bind, Typed or default case.")
+ }
+ case _ =>
+ }
+
+ /** Keep the leading cases a JVM catch clause can express directly; wrap all
+  * cases from the first unsupported one on into a single catch-all case that
+  * pattern matches on the caught exception.
+  */
+ override def transformTry(tree: Try)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val (tryCases, patternMatchCases) = tree.cases.span(isCatchCase)
+ val fallbackCase = mkFallbackPatternMatchCase(patternMatchCases, tree.pos)
+ cpy.Try(tree)(cases = tryCases ++ fallbackCase)
+ }
+
+ /** Is this pattern node a catch-all or type-test pattern? */
+ private def isCatchCase(cdef: CaseDef)(implicit ctx: Context): Boolean = cdef match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => isSimpleThrowable(tpt.tpe)
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => isSimpleThrowable(tpt.tpe)
+ case _ => isDefaultCase(cdef)
+ }
+
+ /** Can `tp` go directly into a JVM exception-table entry? It must be a
+  * statically accessible class type (no outer-instance check), not a trait,
+  * and derive from Throwable.
+  */
+ private def isSimpleThrowable(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp @ TypeRef(pre, _) =>
+ (pre == NoPrefix || pre.widen.typeSymbol.isStatic) && // Does not require outer class check
+ !tp.symbol.is(Flags.Trait) && // Traits not supported by JVM
+ tp.derivesFrom(defn.ThrowableClass)
+ case _ =>
+ false
+ }
+
+ /** Build the fallback `case ex: Throwable => ex match { <cases>; case _ => throw ex }`
+  * case, or None when there are no leftover pattern-match cases.
+  * (Renamed from `mkFallbackPatterMatchCase`: fixed the "Patter" typo; the
+  * method is private and only called from `transformTry` above.)
+  */
+ private def mkFallbackPatternMatchCase(patternMatchCases: List[CaseDef], pos: Position)(
+ implicit ctx: Context, info: TransformerInfo): Option[CaseDef] = {
+ if (patternMatchCases.isEmpty) None
+ else {
+ val exName = ctx.freshName("ex").toTermName
+ val fallbackSelector =
+ ctx.newSymbol(ctx.owner, exName, Flags.Synthetic | Flags.Case, defn.ThrowableType, coord = pos)
+ val sel = Ident(fallbackSelector.termRef).withPos(pos)
+ // Rethrow anything the moved cases do not cover.
+ val rethrow = CaseDef(EmptyTree, EmptyTree, Throw(ref(fallbackSelector)))
+ Some(CaseDef(
+ Bind(fallbackSelector, Underscore(fallbackSelector.info).withPos(pos)),
+ EmptyTree,
+ transformFollowing(Match(sel, patternMatchCases ::: rethrow :: Nil)))
+ )
+ }
+ }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
new file mode 100644
index 000000000..3774127fa
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
@@ -0,0 +1,124 @@
+package dotty.tools.dotc
+package transform
+
+import core.Contexts._
+import core.Symbols._
+import core.Types._
+import core.Constants._
+import core.StdNames._
+import core.TypeErasure.isUnboundedGeneric
+import ast.Trees._
+import Erasure.Boxing._
+import core.TypeErasure._
+import ValueClasses._
+
+/** This transform normalizes type tests and type casts,
+ * also replacing type tests with singleton argument type with reference equality check
+ * Any remaining type tests
+ * - use the object methods $isInstanceOf and $asInstanceOf
+ * - have a reference type as receiver
+ * - can be translated directly to machine instructions
+ *
+ *
+ * Unfortunately this phase ended up being not Y-checkable unless types are erased. A cast to an ConstantType(3) or x.type
+ * cannot be rewritten before erasure.
+ */
+trait TypeTestsCasts {
+ import ast.tpd._
+
+ // override def phaseName: String = "typeTestsCasts"
+
+ /** Rewrite `isInstanceOf`/`asInstanceOf` type applications into forms the
+  * backend can compile: constant-fold statically known tests, turn
+  * singleton-type tests into reference/value equality, expand And/Or types,
+  * and special-case multi-dimensional generic arrays. Other TypeApply nodes
+  * are returned unchanged.
+  */
+ def interceptTypeApply(tree: TypeApply)(implicit ctx: Context): Tree = ctx.traceIndented(s"transforming ${tree.show}", show = true) {
+ tree.fun match {
+ case fun @ Select(qual, selector) =>
+ val sym = tree.symbol
+
+ def isPrimitive(tp: Type) = tp.classSymbol.isPrimitiveValueClass
+
+ // Rebuild the TypeApply with a new receiver, method symbol and type arg.
+ def derivedTree(qual1: Tree, sym: Symbol, tp: Type) =
+ cpy.TypeApply(tree)(qual1.select(sym).withPos(qual.pos), List(TypeTree(tp)))
+
+ def qualCls = qual.tpe.widen.classSymbol
+
+ /** Rewrite `expr.isInstanceOf[argType]` (argType already erased here). */
+ def transformIsInstanceOf(expr:Tree, argType: Type): Tree = {
+ def argCls = argType.classSymbol
+ // Statically true test on a pure expression folds to `true`.
+ if ((expr.tpe <:< argType) && isPureExpr(expr))
+ Literal(Constant(true)) withPos tree.pos
+ else if (argCls.isPrimitiveValueClass)
+ if (qualCls.isPrimitiveValueClass) Literal(Constant(qualCls == argCls)) withPos tree.pos
+ else transformIsInstanceOf(expr, defn.boxedType(argCls.typeRef))
+ else argType.dealias match {
+ case _: SingletonType =>
+ // Singleton-type test becomes an equality check against the value.
+ val cmpOp = if (argType derivesFrom defn.AnyValClass) defn.Any_equals else defn.Object_eq
+ expr.select(cmpOp).appliedTo(singleton(argType))
+ case AndType(tp1, tp2) =>
+ // Test both conjuncts, dropping statically-true halves.
+ evalOnce(expr) { fun =>
+ val erased1 = transformIsInstanceOf(fun, tp1)
+ val erased2 = transformIsInstanceOf(fun, tp2)
+ erased1 match {
+ case Literal(Constant(true)) => erased2
+ case _ =>
+ erased2 match {
+ case Literal(Constant(true)) => erased1
+ case _ => erased1 and erased2
+ }
+ }
+ }
+ case defn.MultiArrayOf(elem, ndims) if isUnboundedGeneric(elem) =>
+ // Generic multi-dimensional arrays need a runtime dimension check.
+ def isArrayTest(arg: Tree) =
+ ref(defn.runtimeMethodRef(nme.isArray)).appliedTo(arg, Literal(Constant(ndims)))
+ if (ndims == 1) isArrayTest(qual)
+ else evalOnce(qual) { qual1 =>
+ derivedTree(qual1, defn.Any_isInstanceOf, qual1.tpe) and isArrayTest(qual1)
+ }
+ case _ =>
+ derivedTree(expr, defn.Any_isInstanceOf, argType)
+ }
+ }
+
+ /** Rewrite `qual.asInstanceOf[argType]` (argType already erased here). */
+ def transformAsInstanceOf(argType: Type): Tree = {
+ def argCls = argType.widen.classSymbol
+ if (qual.tpe <:< argType)
+ Typed(qual, tree.args.head)
+ else if (qualCls.isPrimitiveValueClass) {
+ if (argCls.isPrimitiveValueClass) primitiveConversion(qual, argCls)
+ else derivedTree(box(qual), defn.Any_asInstanceOf, argType)
+ }
+ else if (argCls.isPrimitiveValueClass)
+ unbox(qual.ensureConforms(defn.ObjectType), argType)
+ else if (isDerivedValueClass(argCls)) {
+ qual // adaptToType in Erasure will do the necessary type adaptation
+ }
+ else
+ derivedTree(qual, defn.Any_asInstanceOf, argType)
+ }
+
+ /** Transform isInstanceOf OrType
+ *
+ * expr.isInstanceOf[A | B] ~~> expr.isInstanceOf[A] | expr.isInstanceOf[B]
+ *
+ * The transform happens before erasure of `argType`, thus cannot be merged
+ * with `transformIsInstanceOf`, which depends on erased type of `argType`.
+ */
+ def transformOrTypeTest(qual: Tree, argType: Type): Tree = argType.dealias match {
+ case OrType(tp1, tp2) =>
+ evalOnce(qual) { fun =>
+ transformOrTypeTest(fun, tp1)
+ .select(nme.OR)
+ .appliedTo(transformOrTypeTest(fun, tp2))
+ }
+ case _ =>
+ transformIsInstanceOf(qual, erasure(argType))
+ }
+
+ if (sym eq defn.Any_isInstanceOf)
+ transformOrTypeTest(qual, tree.args.head.tpe)
+ else if (sym eq defn.Any_asInstanceOf)
+ transformAsInstanceOf(erasure(tree.args.head.tpe))
+ else tree
+
+ case _ =>
+ tree
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
new file mode 100644
index 000000000..d474c77b4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
@@ -0,0 +1,34 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TypeErasure.ErasedValueType
+import Types._
+import Contexts._
+import Symbols._
+import Decorators._
+import StdNames.nme
+import NameOps._
+import language.implicitConversions
+
+object TypeUtils {
+ // Implicit decorator that enables the TypeUtils extension methods on Type.
+ implicit def decorateTypeUtils(tpe: Type): TypeUtils = new TypeUtils(tpe)
+}
+
+/** A decorator that provides methods on types
+ * that are needed in the transformer pipeline.
+ */
+class TypeUtils(val self: Type) extends AnyVal {
+ import TypeUtils._
+
+ /** Is this type an ErasedValueType (value-class representation after erasure)? */
+ def isErasedValueType(implicit ctx: Context): Boolean =
+ self.isInstanceOf[ErasedValueType]
+
+ /** Is this type's class symbol a primitive value class (Int, Double, ...)? */
+ def isPrimitiveValueType(implicit ctx: Context): Boolean =
+ self.classSymbol.isPrimitiveValueClass
+
+ /** Wrap this type so it is methodic: already-methodic types are returned
+  * unchanged; otherwise a nullary MethodType after erasure, an ExprType before.
+  */
+ def ensureMethodic(implicit ctx: Context): Type = self match {
+ case self: MethodicType => self
+ case _ => if (ctx.erasedTypes) MethodType(Nil, self) else ExprType(self)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
new file mode 100644
index 000000000..1582158ac
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
@@ -0,0 +1,41 @@
+package dotty.tools.dotc
+package transform
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Trees._, StdNames._, Symbols._
+import DenotTransformers._, TreeTransforms._, Phases.Phase
+import ExtensionMethods._, TreeExtractors._, ValueClasses._
+
+/** This phase elides unnecessary value class allocations
+ *
+ * For a value class V defined as:
+ * class V(val underlying: U) extends AnyVal
+ * we avoid unnecessary allocations:
+ * new V(u1) == new V(u2) => u1 == u2
+ * (new V(u)).underlying() => u
+ */
+class VCElideAllocations extends MiniPhaseTransform with IdentityDenotTransformer {
+ import tpd._
+
+ override def phaseName: String = "vcElideAllocations"
+
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[ElimErasedValueType])
+
+ /** Rewrite comparisons of two fresh value-class instances to a comparison
+  * of their underlying values, and unboxing of a fresh instance to the
+  * boxed value itself; all other applies are left unchanged.
+  */
+ override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ tree match {
+ // new V(u1) == new V(u2) => u1 == u2
+ // (We don't handle != because it has been eliminated by InterceptedMethods)
+ case BinaryOp(NewWithArgs(tp1, List(u1)), op, NewWithArgs(tp2, List(u2)))
+ if (tp1 eq tp2) && (op eq defn.Any_==) && isDerivedValueClass(tp1.typeSymbol) =>
+ // == is overloaded in primitive classes
+ applyOverloaded(u1, nme.EQ, List(u2), Nil, defn.BooleanType)
+
+ // (new V(u)).underlying() => u
+ case ValueClassUnbox(NewWithArgs(_, List(u))) =>
+ u
+
+ case _ =>
+ tree
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
new file mode 100644
index 000000000..ddd414417
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
@@ -0,0 +1,104 @@
+package dotty.tools.dotc
+package transform
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Trees._, Types._
+import DenotTransformers._, TreeTransforms._, Phases.Phase
+import ExtensionMethods._, ValueClasses._
+
+import collection.mutable.ListBuffer
+
+/** This phase inlines calls to methods of value classes.
+ *
+ * A value class V after [[ExtensionMethods]] will look like:
+ * class V[A, B, ...](val underlying: U) extends AnyVal {
+ * def foo[T, S, ...](arg1: A1, arg2: A2, ...) =
+ * V.foo$extension[T, S, ..., A, B, ...](this)(arg1, arg2, ...)
+ *
+ * ...
+ * }
+ *
+ * Let e have type V, if e is a stable prefix or if V does not have any class
+ * type parameter, then we can rewrite:
+ * e.foo[X, Y, ...](args)
+ * as:
+ * V.foo$extension[X, Y, ..., e.A, e.B, ...](e)(args)
+ * Otherwise, we need to evaluate e first:
+ * {
+ * val ev = e
+ * V.foo$extension[X, Y, ..., ev.A, ev.B, ...](ev)(args)
+ * }
+ *
+ * This phase needs to be placed after phases which may introduce calls to
+ * value class methods (like [[PatternMatcher]]). This phase uses name mangling
+ * to find the correct extension method corresponding to a value class method
+ * (see [[ExtensionMethods.extensionMethod]]), therefore we choose to place it
+ * before phases which may perform their own name mangling on value class
+ * methods (like [[TypeSpecializer]]), this way [[VCInlineMethods]] does not
+ * need to have any knowledge of the name mangling done by other phases.
+ */
+class VCInlineMethods extends MiniPhaseTransform with IdentityDenotTransformer {
+ import tpd._
+
+ override def phaseName: String = "vcInlineMethods"
+
+ override def runsAfter: Set[Class[_ <: Phase]] =
+ Set(classOf[ExtensionMethods], classOf[PatternMatcher])
+
+ /** Replace a value class method call by a call to the corresponding extension method.
+ *
+ * @param tree The tree corresponding to the method call
+ * @param mtArgs Type arguments for the method call not present in `tree`
+ * @param mArgss Arguments for the method call not present in `tree`
+ * @return A tree for the extension method call
+ */
+ private def rewire(tree: Tree, mtArgs: List[Tree] = Nil, mArgss: List[List[Tree]] = Nil)
+ (implicit ctx: Context): Tree =
+ tree match {
+ case Apply(qual, mArgs) =>
+ // Accumulate value-argument lists while walking down to the Select.
+ rewire(qual, mtArgs, mArgs :: mArgss)
+ case TypeApply(qual, mtArgs2) =>
+ // At most one TypeApply is expected on the way down.
+ assert(mtArgs == Nil)
+ rewire(qual, mtArgs2, mArgss)
+ case sel @ Select(qual, _) =>
+ val origMeth = sel.symbol
+ val ctParams = origMeth.enclosingClass.typeParams
+ val extensionMeth = extensionMethod(origMeth)
+
+ if (!ctParams.isEmpty) {
+ // Class type params must be re-derived from the (evaluated-once) prefix.
+ evalOnce(qual) { ev =>
+ val ctArgs = ctParams map (ev.select(_))
+ ref(extensionMeth)
+ .appliedToTypeTrees(mtArgs ++ ctArgs)
+ .appliedTo(ev)
+ .appliedToArgss(mArgss)
+ }
+ } else {
+ ref(extensionMeth)
+ .appliedToTypeTrees(mtArgs)
+ .appliedTo(qual)
+ .appliedToArgss(mArgss)
+ }
+ }
+
+ /** If this tree corresponds to a fully-applied value class method call, replace it
+ * by a call to the corresponding extension method, otherwise return it as is.
+ */
+ private def rewireIfNeeded(tree: Tree)(implicit ctx: Context) = tree.tpe.widen match {
+ case tp: MethodOrPoly =>
+ tree // The rewiring will be handled by a fully-applied parent node
+ case _ =>
+ if (isMethodWithExtension(tree.symbol))
+ rewire(tree).ensureConforms(tree.tpe)
+ else
+ tree
+ }
+
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree =
+ rewireIfNeeded(tree)
+ override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ rewireIfNeeded(tree)
+ override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree =
+ rewireIfNeeded(tree)
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
new file mode 100644
index 000000000..93005c57a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
@@ -0,0 +1,56 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Types._
+import Symbols._
+import SymDenotations._
+import Contexts._
+import Flags._
+import StdNames._
+
+/** Methods that apply to user-defined value classes */
+object ValueClasses {
+
+ /** Is `d` a user-defined (derived) value class — a value class other than
+  * AnyVal itself and the primitives — with value classes enabled?
+  */
+ def isDerivedValueClass(d: SymDenotation)(implicit ctx: Context) = {
+ !ctx.settings.XnoValueClasses.value &&
+ !d.isRefinementClass &&
+ d.isValueClass &&
+ (d.initial.symbol ne defn.AnyValClass) && // Compare the initial symbol because AnyVal does not exist after erasure
+ !d.isPrimitiveValueClass
+ }
+
+ /** Does `d` denote a value-class method that gets a corresponding
+  * extension method (i.e. a real method that is neither a constructor,
+  * a super accessor, nor a macro)?
+  */
+ def isMethodWithExtension(d: SymDenotation)(implicit ctx: Context) =
+ d.isRealMethod &&
+ isDerivedValueClass(d.owner) &&
+ !d.isConstructor &&
+ !d.is(SuperAccessor) &&
+ !d.is(Macro)
+
+ /** The member of a derived value class that unboxes it. */
+ def valueClassUnbox(d: ClassDenotation)(implicit ctx: Context): Symbol =
+ // (info.decl(nme.unbox)).orElse(...) uncomment once we accept unbox methods
+ d.classInfo.decls
+ .find(d => d.isTerm && d.symbol.is(ParamAccessor))
+ .map(_.symbol)
+ .getOrElse(NoSymbol)
+
+ /** For a value class `d`, this returns the synthetic cast from the underlying type to
+ * ErasedValueType defined in the companion module. This method is added to the module
+ * and further described in [[ExtensionMethods]].
+ */
+ def u2evt(d: ClassDenotation)(implicit ctx: Context): Symbol =
+ d.linkedClass.info.decl(nme.U2EVT).symbol
+
+ /** For a value class `d`, this returns the synthetic cast from ErasedValueType to the
+ * underlying type defined in the companion module. This method is added to the module
+ * and further described in [[ExtensionMethods]].
+ */
+ def evt2u(d: ClassDenotation)(implicit ctx: Context): Symbol =
+ d.linkedClass.info.decl(nme.EVT2U).symbol
+
+ /** The unboxed type that underlies a derived value class */
+ def underlyingOfValueClass(d: ClassDenotation)(implicit ctx: Context): Type =
+ valueClassUnbox(d).info.resultType
+
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
new file mode 100644
index 000000000..8d926fcf0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -0,0 +1,615 @@
+package dotty.tools.dotc
+package transform
+package patmat
+
+import core.Types._
+import core.Contexts._
+import core.Flags._
+import ast.Trees._
+import ast.tpd
+import core.Decorators._
+import core.Symbols._
+import core.StdNames._
+import core.NameOps._
+import core.Constants._
+import reporting.diagnostic.messages._
+
+/** Space logic for checking exhaustivity and unreachability of pattern matching
+ *
+ * Space can be thought of as a set of possible values. A type or a pattern
+ * both refer to spaces. The space of a type is the values that inhabit the
+ * type. The space of a pattern is the values that can be covered by the
+ * pattern.
+ *
+ * Space is recursively defined as follows:
+ *
+ * 1. `Empty` is a space
+ * 2. For a type T, `Typ(T)` is a space
+ * 3. A union of spaces `S1 | S2 | ...` is a space
+ * 4. For a case class Kon(x1: T1, x2: T2, .., xn: Tn), if S1, S2, ..., Sn
+ * are spaces, then `Kon(S1, S2, ..., Sn)` is a space.
+ * 5. A constant `Const(value, T)` is a point in space
+ * 6. A stable identifier `Var(sym, T)` is a space
+ *
+ * For the problem of exhaustivity check, its formulation in terms of space is as follows:
+ *
+ * Is the space Typ(T) a subspace of the union of space covered by all the patterns?
+ *
+ * The problem of unreachable patterns can be formulated as follows:
+ *
+ * Is the space covered by a pattern a subspace of the space covered by previous patterns?
+ *
+ * Assumption:
+ * (1) One case class cannot be inherited directly or indirectly by another
+ * case class.
+ * (2) Inheritance of a case class cannot be well handled by the algorithm.
+ *
+ */
+
+
+/** space definition */
+sealed trait Space
+
+/** Empty space: the set containing no values */
+case object Empty extends Space
+
+/** Space representing the set of all values of a type
+ *
+ * @param tp: the type this space represents
+ * @param decomposed: does the space result from decomposition? Used for pretty print
+ *
+ */
+case class Typ(tp: Type, decomposed: Boolean) extends Space
+
+/** Space representing a constructor pattern
+ * @param tp the case class type being constructed
+ * @param params one sub-space per constructor parameter
+ */
+case class Kon(tp: Type, params: List[Space]) extends Space
+
+/** Union of spaces */
+case class Or(spaces: List[Space]) extends Space
+
+/** Point in space: a space containing a single value */
+sealed trait Point extends Space
+
+/** Point representing variables (stable identifiers) in patterns */
+case class Var(sym: Symbol, tp: Type) extends Point
+
+/** Point representing literal constants in patterns */
+case class Const(value: Constant, tp: Type) extends Point
+
+/** abstract space logic */
+trait SpaceLogic {
+ /** Is `tp1` a subtype of `tp2`? */
+ def isSubType(tp1: Type, tp2: Type): Boolean
+
+ /** Is `tp1` the same type as `tp2`? */
+ def isEqualType(tp1: Type, tp2: Type): Boolean
+
+ /** Is the type `tp` decomposable? i.e. all values of the type can be covered
+ * by its decomposed types.
+ *
+ * Abstract sealed class, OrType, Boolean and Java enums can be decomposed.
+ */
+ def canDecompose(tp: Type): Boolean
+
+ /** Return term parameter types of the case class `tp` */
+ def signature(tp: Type): List[Type]
+
+ /** Get components of decomposable types */
+ def decompose(tp: Type): List[Space]
+
+ /** Simplify space using the laws; there's no nested union after simplify.
+  * A Kon with an Empty component collapses to Empty, unions are flattened
+  * and Empty members dropped, and a Typ that decomposes to nothing is Empty.
+  */
+ def simplify(space: Space): Space = space match {
+ case Kon(tp, spaces) =>
+ val sp = Kon(tp, spaces.map(simplify _))
+ if (sp.params.contains(Empty)) Empty
+ else sp
+ case Or(spaces) =>
+ val set = spaces.map(simplify _).flatMap {
+ case Or(ss) => ss
+ case s => Seq(s)
+ } filter (_ != Empty)
+
+ if (set.isEmpty) Empty
+ else if (set.size == 1) set.toList(0)
+ else Or(set)
+ case Typ(tp, _) =>
+ if (canDecompose(tp) && decompose(tp).isEmpty) Empty
+ else space
+ case _ => space
+ }
+
+ /** Flatten space to get rid of `Or` for pretty print */
+ def flatten(space: Space): List[Space] = space match {
+ case Kon(tp, spaces) =>
+ val flats = spaces.map(flatten _)
+
+ // Cartesian product of the flattened parameter spaces.
+ flats.foldLeft(List[Kon]()) { (acc, flat) =>
+ if (acc.isEmpty) flat.map(s => Kon(tp, Nil :+ s))
+ else for (Kon(tp, ss) <- acc; s <- flat) yield Kon(tp, ss :+ s)
+ }
+ case Or(spaces) =>
+ spaces.flatMap(flatten _)
+ case _ => List(space)
+ }
+
+ /** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */
+ def isSubspace(a: Space, b: Space): Boolean = {
+ // Decompose one side into a union of component spaces and retry.
+ def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) => true
+ case (_, Empty) => false
+ case (Or(ss), _) => ss.forall(isSubspace(_, b))
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ isSubType(tp1, tp2) || tryDecompose1(tp1) || tryDecompose2(tp2)
+ case (Typ(tp1, _), Or(ss)) =>
+ ss.exists(isSubspace(a, _)) || tryDecompose1(tp1)
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ isSubType(tp1, tp2) && isSubspace(Kon(tp2, signature(tp2).map(Typ(_, false))), b) ||
+ tryDecompose1(tp1)
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ isSubType(tp1, tp2) ||
+ simplify(a) == Empty ||
+ (isSubType(tp2, tp1) && tryDecompose1(tp1)) ||
+ tryDecompose2(tp2)
+ case (Kon(_, _), Or(_)) =>
+ simplify(minus(a, b)) == Empty
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ isEqualType(tp1, tp2) && ss1.zip(ss2).forall((isSubspace _).tupled)
+ case (Const(v1, _), Const(v2, _)) => v1 == v2
+ case (Const(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2)
+ case (Const(_, _), Or(ss)) => ss.exists(isSubspace(a, _))
+ case (Const(_, _), _) => false
+ case (_, Const(_, _)) => false
+ case (Var(x, _), Var(y, _)) => x == y
+ case (Var(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2)
+ case (Var(_, _), Or(ss)) => ss.exists(isSubspace(a, _))
+ case (Var(_, _), _) => false
+ case (_, Var(_, _)) => false
+ }
+ }
+
+ /** Intersection of two spaces */
+ def intersect(a: Space, b: Space): Space = {
+ def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) | (_, Empty) => Empty
+ case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filterConserve(_ ne Empty))
+ case (Or(ss), _) => Or(ss.map(intersect(_, b)).filterConserve(_ ne Empty))
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ if (!isEqualType(tp1, tp2)) Empty
+ else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty
+ else Kon(tp1, ss1.zip(ss2).map((intersect _).tupled))
+ case (Const(v1, _), Const(v2, _)) =>
+ if (v1 == v2) a else Empty
+ case (Const(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Const(_, _), _) => Empty
+ case (Typ(tp1, _), Const(_, tp2)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (_, Const(_, _)) => Empty
+ case (Var(x, _), Var(y, _)) =>
+ if (x == y) a else Empty
+ case (Var(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Var(_, _), _) => Empty
+ case (Typ(tp1, _), Var(_, tp2)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (_, Var(_, _)) => Empty
+ }
+ }
+
+ /** The space of a not covered by b */
+ def minus(a: Space, b: Space): Space = {
+ def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) => Empty
+ case (_, Empty) => a
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ // corner case: inheriting a case class
+ // rationale: every instance of `tp1` is covered by `tp2(_)`
+ if (isSubType(tp1, tp2)) minus(Kon(tp2, signature(tp2).map(Typ(_, false))), b)
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else a
+ case (_, Or(ss)) =>
+ ss.foldLeft(a)(minus)
+ case (Or(ss), _) =>
+ Or(ss.map(minus(_, b)))
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ // uncovered corner case: tp2 :< tp1
+ if (isSubType(tp1, tp2)) Empty
+ else if (simplify(a) == Empty) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ if (!isEqualType(tp1, tp2)) a
+ else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) a
+ else if (ss1.zip(ss2).forall((isSubspace _).tupled)) Empty
+ else
+ // `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
+ Or(ss1.zip(ss2).map((minus _).tupled).zip(0 to ss2.length - 1).map {
+ case (ri, i) => Kon(tp1, ss1.updated(i, ri))
+ })
+ case (Const(v1, _), Const(v2, _)) =>
+ if (v1 == v2) Empty else a
+ case (Const(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Const(_, _), _) => a
+ case (Typ(tp1, _), Const(_, tp2)) => // Boolean & Java enum
+ if (canDecompose(tp1)) tryDecompose1(tp1)
+ else a
+ case (_, Const(_, _)) => a
+ case (Var(x, _), Var(y, _)) =>
+ if (x == y) Empty else a
+ case (Var(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Var(_, _), _) => a
+ case (_, Var(_, _)) => a
+ }
+ }
+}
+
+/** Scala implementation of space logic */
+class SpaceEngine(implicit ctx: Context) extends SpaceLogic {
+ import tpd._
+
+ /** Return the space that represents the pattern `pat`
+ *
+ * If roundUp is true, approximate extractors to its type,
+ * otherwise approximate extractors to Empty
+ */
+ def project(pat: Tree, roundUp: Boolean = true)(implicit ctx: Context): Space = pat match {
+ case Literal(c) => Const(c, c.tpe)
+ case _: BackquotedIdent => Var(pat.symbol, pat.tpe)
+ case Ident(_) | Select(_, _) =>
+ pat.tpe.stripAnnots match {
+ case tp: TermRef =>
+ // Enum values and module references are points/types; other stable
+ // identifiers are variables.
+ if (pat.symbol.is(Enum))
+ Const(Constant(pat.symbol), tp)
+ else if (tp.underlyingIterator.exists(_.classSymbol.is(Module)))
+ Typ(tp.widenTermRefExpr.stripAnnots, false)
+ else
+ Var(pat.symbol, tp)
+ case tp => Typ(tp, false)
+ }
+ case Alternative(trees) => Or(trees.map(project(_, roundUp)))
+ case Bind(_, pat) => project(pat)
+ case UnApply(_, _, pats) =>
+ // Case-class extractors are exact; other extractors are approximated.
+ if (pat.tpe.classSymbol.is(CaseClass))
+ Kon(pat.tpe.stripAnnots, pats.map(pat => project(pat, roundUp)))
+ else if (roundUp) Typ(pat.tpe.stripAnnots, false)
+ else Empty
+ case Typed(pat @ UnApply(_, _, _), _) => project(pat)
+ case Typed(expr, _) => Typ(expr.tpe.stripAnnots, true)
+ case _ =>
+ Empty
+ }
+
+ /* Erase a type binding according to erasure semantics in pattern matching:
+  * strip type applications and refinements, but recurse into union and
+  * intersection types, and keep Array types intact. */
+ def erase(tp: Type): Type = {
+ def doErase(tp: Type): Type = tp match {
+ case tp: HKApply => erase(tp.superType)
+ case tp: RefinedType => erase(tp.parent)
+ case _ => tp
+ }
+
+ tp match {
+ case OrType(tp1, tp2) =>
+ OrType(erase(tp1), erase(tp2))
+ case AndType(tp1, tp2) =>
+ AndType(erase(tp1), erase(tp2))
+ case _ =>
+ val origin = doErase(tp)
+ if (origin =:= defn.ArrayType) tp else origin
+ }
+ }
+
+ /** Is `tp1` a subtype of `tp2`? Both sides are erased first so that
+  * type-argument bindings do not affect the check.
+  */
+ def isSubType(tp1: Type, tp2: Type): Boolean = {
+ // check SI-9657 and tests/patmat/gadt.scala
+ erase(tp1) <:< erase(tp2)
+ }
+
+ /** Is `tp1` the same type as `tp2` (mutual subtyping, no erasure)? */
+ def isEqualType(tp1: Type, tp2: Type): Boolean = tp1 =:= tp2
+
+ /** Parameter types of the case class type `tp`, as seen from `tp` itself
+  * (class type parameters instantiated, path-dependent types refined).
+  */
+ def signature(tp: Type): List[Type] = {
+ val ktor = tp.classSymbol.primaryConstructor.info
+
+ val meth = ktor match {
+ case ktor: PolyType =>
+ ktor.instantiate(tp.classSymbol.typeParams.map(_.typeRef)).asSeenFrom(tp, tp.classSymbol)
+ case _ => ktor
+ }
+
+ // refine path-dependent type in params. refer to t9672
+ meth.firstParamTypes.map(_.asSeenFrom(tp, tp.classSymbol))
+ }
+
+ /** Decompose a type into subspaces -- assume the type can be decomposed */
+ def decompose(tp: Type): List[Space] = {
+ // Sealed children are recorded as ChildAnnot annotations on the class.
+ val children = tp.classSymbol.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot =>
+ // refer to definition of Annotation.makeChild
+ annot.tree match {
+ case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol
+ }
+ }
+
+ tp match {
+ case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true))
+ case _ if tp =:= ctx.definitions.BooleanType =>
+ List(
+ Const(Constant(true), ctx.definitions.BooleanType),
+ Const(Constant(false), ctx.definitions.BooleanType)
+ )
+ case _ if tp.classSymbol.is(Enum) =>
+ children.map(sym => Const(Constant(sym), tp))
+ case _ =>
+ val parts = children.map { sym =>
+ if (sym.is(ModuleClass))
+ sym.asClass.classInfo.selfType
+ else if (sym.info.typeParams.length > 0 || tp.isInstanceOf[TypeRef])
+ refine(tp, sym.typeRef)
+ else
+ sym.typeRef
+ } filter { tpe =>
+ // Child class may not always be subtype of parent:
+ // GADT & path-dependent types
+ tpe <:< expose(tp)
+ }
+
+ parts.map(Typ(_, true))
+ }
+ }
+
+ /** Refine tp2 based on tp1
+ *
+ * E.g. if `tp1` is `Option[Int]`, `tp2` is `Some`, then return
+ * `Some[Int]`.
+ *
+ * If `tp1` is `path1.A`, `tp2` is `path2.B`, and `path1` is subtype of
+ * `path2`, then return `path1.B`.
+ */
+ def refine(tp1: Type, tp2: Type): Type = (tp1, tp2) match {
+ case (tp1: RefinedType, _) => tp1.wrapIfMember(refine(tp1.parent, tp2))
+ case (tp1: HKApply, _) => refine(tp1.superType, tp2)
+ case (TypeRef(ref1: TypeProxy, _), tp2 @ TypeRef(ref2: TypeProxy, name)) =>
+ // Rebase tp2's name onto tp1's (more specific) prefix when compatible.
+ if (ref1.underlying <:< ref2.underlying) TypeRef(ref1, name) else tp2
+ case _ => tp2
+ }
+
+ /** Abstract sealed types, sealed traits, or-types, Boolean and Java enums
+  * can be decomposed into a finite set of component spaces.
+  */
+ def canDecompose(tp: Type): Boolean = {
+ tp.classSymbol.is(allOf(Abstract, Sealed)) ||
+ tp.classSymbol.is(allOf(Trait, Sealed)) ||
+ tp.isInstanceOf[OrType] ||
+ tp =:= ctx.definitions.BooleanType ||
+ tp.classSymbol.is(Enum)
+ }
+
+ /** Show friendly type name with current scope in mind
+ *
+ * E.g. C.this.B --> B if current owner is C
+ * C.this.x.T --> x.T if current owner is C
+ * X[T] --> X
+ * C --> C if current owner is C !!!
+ *
+ */
+ def showType(tp: Type): String = {
+ val enclosingCls = ctx.owner.enclosingClass.asClass.classInfo.symbolicTypeRef
+
+ // Prefixes that add no information for a user in this scope are dropped.
+ def isOmittable(sym: Symbol) =
+ sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName ||
+ ctx.definitions.UnqualifiedOwnerTypes.exists(_.symbol == sym) ||
+ sym.showFullName.startsWith("scala.") ||
+ sym == enclosingCls.typeSymbol
+
+ def refinePrefix(tp: Type): String = tp match {
+ case NoPrefix => ""
+ case tp: NamedType if isOmittable(tp.symbol) => ""
+ case tp: ThisType => refinePrefix(tp.tref)
+ case tp: RefinedType => refinePrefix(tp.parent)
+ case tp: NamedType => tp.name.show.stripSuffix("$")
+ }
+
+ def refine(tp: Type): String = tp match {
+ case tp: RefinedType => refine(tp.parent)
+ case tp: ThisType => refine(tp.tref)
+ case tp: NamedType =>
+ val pre = refinePrefix(tp.prefix)
+ if (tp.name == tpnme.higherKinds) pre
+ else if (pre.isEmpty) tp.name.show.stripSuffix("$")
+ else pre + "." + tp.name.show.stripSuffix("$")
+ case _ => tp.show.stripSuffix("$")
+ }
+
+ val text = tp.stripAnnots match {
+ case tp: OrType => showType(tp.tp1) + " | " + showType(tp.tp2)
+ case tp => refine(tp)
+ }
+
+ // Fall back to the enclosing class name when everything was omitted.
+ if (text.isEmpty) enclosingCls.show.stripSuffix("$")
+ else text
+ }
+
+ /** Display spaces */
+ def show(s: Space): String = {
+ // `mergeList` is true while rendering elements of a `::` cell, so nested
+ // cons cells print flat as `List(a, b)` instead of `List(a, List(b))`.
+ def doShow(s: Space, mergeList: Boolean = false): String = s match {
+ case Empty => ""
+ case Const(v, _) => v.show
+ case Var(x, _) => x.show
+ case Typ(tp, decomposed) =>
+ val sym = tp.widen.classSymbol
+
+ if (sym.is(ModuleClass))
+ showType(tp)
+ else if (ctx.definitions.isTupleType(tp))
+ signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (sym.showFullName == "scala.collection.immutable.::")
+ if (mergeList) "_" else "List(_)"
+ else if (tp.classSymbol.is(CaseClass))
+ // use constructor syntax for case class
+ showType(tp) + signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (signature(tp).nonEmpty)
+ tp.classSymbol.name + signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (decomposed) "_: " + showType(tp)
+ else "_"
+ case Kon(tp, params) =>
+ if (ctx.definitions.isTupleType(tp))
+ "(" + params.map(doShow(_)).mkString(", ") + ")"
+ else if (tp.widen.classSymbol.showFullName == "scala.collection.immutable.::")
+ if (mergeList) params.map(doShow(_, mergeList)).mkString(", ")
+ else params.map(doShow(_, true)).filter(_ != "Nil").mkString("List(", ", ", ")")
+ else
+ showType(tp) + params.map(doShow(_)).mkString("(", ", ", ")")
+ case Or(_) =>
+ // `flatten` below is expected to have eliminated all Or nodes already.
+ throw new Exception("incorrect flatten result " + s)
+ }
+
+ // Flatten into alternatives and show each distinct one, comma-separated.
+ flatten(s).map(doShow(_, false)).distinct.mkString(", ")
+ }
+
+ // Whether this match should be analysed at all: the selector type (or, for
+ // a tuple, one of its components) must have a recognised decomposable shape,
+ // and must not be annotated with @unchecked.
+ def checkable(tree: Match): Boolean = {
+ def isCheckable(tp: Type): Boolean = tp match {
+ case AnnotatedType(tp, annot) =>
+ // @unchecked on the selector suppresses the analysis
+ (ctx.definitions.UncheckedAnnot != annot.symbol) && isCheckable(tp)
+ case _ =>
+ // Possible to check everything, but be compatible with scalac by default
+ ctx.settings.YcheckAllPatmat.value ||
+ tp.typeSymbol.is(Sealed) ||
+ tp.isInstanceOf[OrType] ||
+ tp.typeSymbol == ctx.definitions.BooleanType.typeSymbol ||
+ tp.typeSymbol.is(Enum) ||
+ canDecompose(tp) ||
+ (defn.isTupleType(tp) && tp.dealias.argInfos.exists(isCheckable(_)))
+ }
+
+ val Match(sel, cases) = tree
+ isCheckable(sel.tpe.widen.deAnonymize.dealiasKeepAnnots)
+ }
+
+
+ /** Expose refined type to eliminate reference to type variables
+ *
+ * A = B M { type T = A } ~~> M { type T = B }
+ *
+ * A <: X :> Y M { type T = A } ~~> M { type T <: X :> Y }
+ *
+ * A <: X :> Y B <: U :> V M { type T <: A :> B } ~~> M { type T <: X :> V }
+ *
+ * A = X B = Y M { type T <: A :> B } ~~> M { type T <: X :> Y }
+ */
+ def expose(tp: Type): Type = {
+ // Chase a type through its bounds: up to the hi bound when `up`, down to
+ // the lo bound otherwise, distributing over unions and intersections.
+ def follow(tp: Type, up: Boolean): Type = tp match {
+ case tp: TypeProxy =>
+ tp.underlying match {
+ case TypeBounds(lo, hi) =>
+ follow(if (up) hi else lo, up)
+ case _ =>
+ tp
+ }
+ case OrType(tp1, tp2) =>
+ OrType(follow(tp1, up), follow(tp2, up))
+ case AndType(tp1, tp2) =>
+ AndType(follow(tp1, up), follow(tp2, up))
+ }
+
+ tp match {
+ case tp: RefinedType =>
+ tp.refinedInfo match {
+ case tpa : TypeAlias =>
+ val hi = follow(tpa.alias, true)
+ val lo = follow(tpa.alias, false)
+ // If following up and down agrees, the alias resolves to one type;
+ // otherwise widen the alias into the bounds that were found.
+ val refined = if (hi =:= lo)
+ tpa.derivedTypeAlias(hi)
+ else
+ tpa.derivedTypeBounds(lo, hi)
+
+ tp.derivedRefinedType(
+ expose(tp.parent),
+ tp.refinedName,
+ refined
+ )
+ case tpb @ TypeBounds(lo, hi) =>
+ tp.derivedRefinedType(
+ expose(tp.parent),
+ tp.refinedName,
+ tpb.derivedTypeBounds(follow(lo, false), follow(hi, true))
+ )
+ }
+ case _ => tp
+ }
+ }
+
+ // Warn if the union of the case-pattern spaces does not cover the selector
+ // type's space.
+ def checkExhaustivity(_match: Match): Unit = {
+ val Match(sel, cases) = _match
+ val selTyp = sel.tpe.widen.deAnonymize.dealias
+
+
+ // Union of the spaces projected from each case pattern.
+ val patternSpace = cases.map(x => project(x.pat)).reduce((a, b) => Or(List(a, b)))
+ // What remains of the selector space after subtracting all patterns.
+ val uncovered = simplify(minus(Typ(selTyp, true), patternSpace))
+
+ if (uncovered != Empty)
+ ctx.warning(PatternMatchExhaustivity(show(uncovered)), _match.pos)
+ }
+
+ // Warn on any case whose space is already fully covered by the union of the
+ // preceding cases (i.e. the case is unreachable).
+ def checkRedundancy(_match: Match): Unit = {
+ val Match(sel, cases) = _match
+ // ignore selector type for now
+ // val selTyp = sel.tpe.widen.deAnonymize.dealias
+
+ // starts from the second, the first can't be redundant
+ (1 until cases.length).foreach { i =>
+ // in redundancy check, take guard as false, take extractor as match
+ // nothing in order to soundly approximate
+ val prevs = cases.take(i).map { x =>
+ if (x.guard.isEmpty) project(x.pat, false)
+ else Empty
+ }.reduce((a, b) => Or(List(a, b)))
+
+ val curr = project(cases(i).pat)
+
+ if (isSubspace(curr, prevs)) {
+ ctx.warning(MatchCaseUnreachable(), cases(i).body.pos)
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala
new file mode 100644
index 000000000..6c398cd72
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala
@@ -0,0 +1,1351 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast.{Trees, untpd, tpd, TreeInfo}
+import util.Positions._
+import util.Stats.track
+import Trees.Untyped
+import Mode.ImplicitsEnabled
+import Contexts._
+import Flags._
+import Denotations._
+import NameOps._
+import Symbols._
+import Types._
+import Decorators._
+import ErrorReporting._
+import Trees._
+import config.Config
+import Names._
+import StdNames._
+import ProtoTypes._
+import EtaExpansion._
+import Inferencing._
+import collection.mutable
+import config.Printers.{typr, unapp, overload}
+import TypeApplications._
+import language.implicitConversions
+import reporting.diagnostic.Message
+
+object Applications {
+ import tpd._
+
+ /** The type of parameterless member `name` of `tp`, widened and dealiased.
+ * Overloaded or polymorphic members are rejected with an error type, since
+ * they cannot serve as extractor members.
+ */
+ def extractorMemberType(tp: Type, name: Name, errorPos: Position = NoPosition)(implicit ctx: Context) = {
+ val ref = tp.member(name).suchThat(_.info.isParameterless)
+ if (ref.isOverloaded)
+ errorType(i"Overloaded reference to $ref is not allowed in extractor", errorPos)
+ else if (ref.info.isInstanceOf[PolyType])
+ errorType(i"Reference to polymorphic $ref: ${ref.info} is not allowed in extractor", errorPos)
+ else
+ ref.info.widenExpr.dealias
+ }
+
+ /** The types of the consecutive product selectors `_1`, `_2`, ... of `tp`,
+ * stopping at the first selector that does not exist.
+ */
+ def productSelectorTypes(tp: Type, errorPos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
+ val sels = for (n <- Iterator.from(0)) yield extractorMemberType(tp, nme.selectorName(n), errorPos)
+ sels.takeWhile(_.exists).toList
+ }
+
+ /** The symbols of the consecutive product selectors `_1`, `_2`, ... of `tp`. */
+ def productSelectors(tp: Type)(implicit ctx: Context): List[Symbol] = {
+ val sels = for (n <- Iterator.from(0)) yield tp.member(nme.selectorName(n)).symbol
+ sels.takeWhile(_.exists).toList
+ }
+
+ /** The selector types to use for an unapply result of type `tp`: the product
+ * selector types when there are several sub-patterns and the counts match,
+ * otherwise `tp` itself as the single selector.
+ */
+ def getUnapplySelectors(tp: Type, args: List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] =
+ if (args.length > 1 && !(tp.derivesFrom(defn.SeqClass))) {
+ val sels = productSelectorTypes(tp, pos)
+ if (sels.length == args.length) sels
+ else tp :: Nil
+ } else tp :: Nil
+
+ /** The types to match the sub-patterns against, derived from the result type
+ * of the unapply/unapplySeq. Reports an error and returns Nil when the
+ * result type has none of the recognised shapes (Boolean, option-like with
+ * `isDefined`/`get`, Product, or Seq-derived).
+ */
+ def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
+
+ def seqSelector = defn.RepeatedParamType.appliedTo(unapplyResult.elemType :: Nil)
+ def getTp = extractorMemberType(unapplyResult, nme.get, pos)
+
+ // println(s"unapply $unapplyResult ${extractorMemberType(unapplyResult, nme.isDefined)}")
+ if (extractorMemberType(unapplyResult, nme.isDefined, pos) isRef defn.BooleanClass) {
+ if (getTp.exists)
+ if (unapplyFn.symbol.name == nme.unapplySeq) {
+ // unapplySeq: every sub-pattern is matched at the sequence element type
+ val seqArg = boundsToHi(getTp.elemType)
+ if (seqArg.exists) return args map Function.const(seqArg)
+ }
+ else return getUnapplySelectors(getTp, args, pos)
+ else if (defn.isProductSubType(unapplyResult)) return productSelectorTypes(unapplyResult, pos)
+ }
+ if (unapplyResult derivesFrom defn.SeqClass) seqSelector :: Nil
+ else if (unapplyResult isRef defn.BooleanClass) Nil
+ else {
+ ctx.error(i"$unapplyResult is not a valid result type of an unapply method of an extractor", pos)
+ Nil
+ }
+ }
+
+ /** Wrap `tree` in a block containing `defs`, if any were collected. */
+ def wrapDefs(defs: mutable.ListBuffer[Tree], tree: Tree)(implicit ctx: Context): Tree =
+ if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree
+}
+
+import Applications._
+
+trait Applications extends Compatibility { self: Typer with Dynamic =>
+
+ import Applications._
+ import tpd.{ cpy => _, _ }
+ import untpd.cpy
+ import Dynamic.isDynamicMethod
+
+ /** @tparam Arg the type of arguments, could be tpd.Tree, untpd.Tree, or Type
+ * @param methRef the reference to the method of the application
+ * @param funType the type of the function part of the application
+ * @param args the arguments of the application
+ * @param resultType the expected result type of the application
+ */
+ abstract class Application[Arg](methRef: TermRef, funType: Type, args: List[Arg], resultType: Type)(implicit ctx: Context) {
+
+ /** The type of typed arguments: either tpd.Tree or Type */
+ type TypedArg
+
+ /** Given an original argument and the type of the corresponding formal
+ * parameter, produce a typed argument.
+ */
+ protected def typedArg(arg: Arg, formal: Type): TypedArg
+
+ /** Turn a typed tree into an argument */
+ protected def treeToArg(arg: Tree): Arg
+
+ /** Check that argument corresponds to type `formal` and
+ * possibly add it to the list of adapted arguments
+ */
+ protected def addArg(arg: TypedArg, formal: Type): Unit
+
+ /** Is this an argument of the form `expr: _*` or a RepeatedParamType
+ * derived from such an argument?
+ */
+ protected def isVarArg(arg: Arg): Boolean
+
+ /** If constructing trees, turn last `n` processed arguments into a
+ * `SeqLiteral` tree with element type `elemFormal`.
+ */
+ protected def makeVarArg(n: Int, elemFormal: Type): Unit
+
+ /** If all `args` have primitive numeric types, make sure it's the same one */
+ protected def harmonizeArgs(args: List[TypedArg]): List[TypedArg]
+
+ /** Signal failure with given message at position of given argument */
+ protected def fail(msg: => Message, arg: Arg): Unit
+
+ /** Signal failure with given message at position of the application itself */
+ protected def fail(msg: => Message): Unit
+
+ /** The position used for errors against the application as a whole */
+ protected def appPos: Position
+
+ /** The current function part, which might be affected by lifting.
+ */
+ protected def normalizedFun: Tree
+
+ /** If constructing trees, pull out all parts of the function
+ * which are not idempotent into separate prefix definitions
+ */
+ protected def liftFun(): Unit = ()
+
+ /** A flag signalling that the typechecking the application was so far successful */
+ private[this] var _ok = true
+
+ def ok = _ok
+ def ok_=(x: Boolean) = {
+ assert(x || ctx.reporter.errorsReported || !ctx.typerState.isCommittable) // !!! DEBUG
+ _ok = x
+ }
+
+ /** The function's type after widening and instantiating polytypes
+ * with polyparams in constraint set
+ */
+ val methType = funType.widen match {
+ case funType: MethodType => funType
+ case funType: PolyType => constrained(funType).resultType
+ case tp => tp //was: funType
+ }
+
+ /** The arguments re-ordered so that each named argument matches the
+ * same-named formal parameter.
+ */
+ lazy val orderedArgs =
+ if (hasNamedArg(args))
+ reorder(args.asInstanceOf[List[untpd.Tree]]).asInstanceOf[List[Arg]]
+ else
+ args
+
+ // Drives the matching; called by concrete subclasses once their own state
+ // (e.g. normalizedFun, argument buffers) is initialised.
+ protected def init() = methType match {
+ case methType: MethodType =>
+ // apply the result type constraint, unless method type is dependent
+ if (!methType.isDependent) {
+ val savedConstraint = ctx.typerState.constraint
+ if (!constrainResult(methType.resultType, resultType))
+ if (ctx.typerState.isCommittable)
+ // defer the problem until after the application;
+ // it might be healed by an implicit conversion
+ assert(ctx.typerState.constraint eq savedConstraint)
+ else
+ fail(err.typeMismatchMsg(methType.resultType, resultType))
+ }
+ // match all arguments with corresponding formal parameters
+ matchArgs(orderedArgs, methType.paramTypes, 0)
+ case _ =>
+ if (methType.isError) ok = false
+ else fail(s"$methString does not take parameters")
+ }
+
+ /** The application was successful */
+ def success = ok
+
+ // Only valid when methType is in fact a MethodType (guarded by init()).
+ protected def methodType = methType.asInstanceOf[MethodType]
+ private def methString: String = i"${methRef.symbol}: ${methType.show}"
+
+ /** Re-order arguments to correctly align named arguments */
+ def reorder[T >: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = {
+
+ /** @param pnames The list of parameter names that are missing arguments
+ * @param args The list of arguments that are not yet passed, or that are waiting to be dropped
+ * @param nameToArg A map from as yet unseen names to named arguments
+ * @param toDrop A set of names that have already be passed as named arguments
+ *
+ * For a well-typed application we have the invariants
+ *
+ * 1. `(args diff toDrop)` can be reordered to match `pnames`
+ * 2. For every `(name -> arg)` in `nameToArg`, `arg` is an element of `args`
+ */
+ def recur(pnames: List[Name], args: List[Trees.Tree[T]],
+ nameToArg: Map[Name, Trees.NamedArg[T]], toDrop: Set[Name]): List[Trees.Tree[T]] = pnames match {
+ case pname :: pnames1 if nameToArg contains pname =>
+ // there is a named argument for this parameter; pick it
+ nameToArg(pname) :: recur(pnames1, args, nameToArg - pname, toDrop + pname)
+ case _ =>
+ def pnamesRest = if (pnames.isEmpty) pnames else pnames.tail
+ args match {
+ case (arg @ NamedArg(aname, _)) :: args1 =>
+ if (toDrop contains aname) // argument is already passed
+ recur(pnames, args1, nameToArg, toDrop - aname)
+ else if ((nameToArg contains aname) && pnames.nonEmpty) // argument is missing, pass an empty tree
+ genericEmptyTree :: recur(pnames.tail, args, nameToArg, toDrop)
+ else { // name not (or no longer) available for named arg
+ def msg =
+ if (methodType.paramNames contains aname)
+ s"parameter $aname of $methString is already instantiated"
+ else
+ s"$methString does not have a parameter $aname"
+ fail(msg, arg.asInstanceOf[Arg])
+ arg :: recur(pnamesRest, args1, nameToArg, toDrop)
+ }
+ case arg :: args1 =>
+ arg :: recur(pnamesRest, args1, nameToArg, toDrop) // unnamed argument; pick it
+ case Nil => // no more args, continue to pick up any preceding named args
+ if (pnames.isEmpty) Nil
+ else recur(pnamesRest, args, nameToArg, toDrop)
+ }
+ }
+ val nameAssocs = for (arg @ NamedArg(name, _) <- args) yield (name, arg)
+ recur(methodType.paramNames, args, nameAssocs.toMap, Set())
+ }
+
+ /** Splice new method reference into existing application */
+ def spliceMeth(meth: Tree, app: Tree): Tree = app match {
+ case Apply(fn, args) => Apply(spliceMeth(meth, fn), args)
+ case TypeApply(fn, targs) => TypeApply(spliceMeth(meth, fn), targs)
+ case _ => meth
+ }
+
+ /** Find reference to default parameter getter for parameter #n in current
+ * parameter list, or NoType if none was found
+ */
+ def findDefaultGetter(n: Int)(implicit ctx: Context): Tree = {
+ val meth = methRef.symbol.asTerm
+ // The receiver of the getter: the qualifier of the call if there is one,
+ // otherwise derived from the normalized prefix of the method reference.
+ val receiver: Tree = methPart(normalizedFun) match {
+ case Select(receiver, _) => receiver
+ case mr => mr.tpe.normalizedPrefix match {
+ case mr: TermRef => ref(mr)
+ case mr =>
+ if (this.isInstanceOf[TestApplication[_]])
+ // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call.
+ ref(mr.narrow)
+ else
+ EmptyTree
+ }
+ }
+ // Synthetic applies look up constructor default getters instead.
+ val getterPrefix =
+ if ((meth is Synthetic) && meth.name == nme.apply) nme.CONSTRUCTOR else meth.name
+ def getterName = getterPrefix.defaultGetterName(n)
+ if (!meth.hasDefaultParams)
+ EmptyTree
+ else if (receiver.isEmpty) {
+ // No receiver: walk outer contexts until the scope declaring `meth` is
+ // found, and resolve the getter there.
+ def findGetter(cx: Context): Tree = {
+ if (cx eq NoContext) EmptyTree
+ else if (cx.scope != cx.outer.scope &&
+ cx.denotNamed(meth.name).hasAltWith(_.symbol == meth)) {
+ val denot = cx.denotNamed(getterName)
+ assert(denot.exists, s"non-existent getter denotation ($denot) for getter($getterName)")
+ ref(TermRef(cx.owner.thisType, getterName, denot))
+ } else findGetter(cx.outer)
+ }
+ findGetter(ctx)
+ }
+ else {
+ def selectGetter(qual: Tree): Tree = {
+ val getterDenot = qual.tpe.member(getterName)
+ if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot))
+ else EmptyTree
+ }
+ if (!meth.isClassConstructor)
+ selectGetter(receiver)
+ else {
+ // default getters for class constructors are found in the companion object
+ val cls = meth.owner
+ val companion = cls.companionModule
+ receiver.tpe.baseTypeRef(cls) match {
+ case tp: TypeRef if companion.isTerm =>
+ selectGetter(ref(TermRef(tp.prefix, companion.asTerm)))
+ case _ =>
+ EmptyTree
+ }
+ }
+ }
+ }
+
+ /** Match re-ordered arguments against formal parameters
+ * @param n The position of the first parameter in formals in `methType`.
+ */
+ def matchArgs(args: List[Arg], formals: List[Type], n: Int): Unit = {
+ if (success) formals match {
+ case formal :: formals1 =>
+
+ def addTyped(arg: Arg, formal: Type) =
+ addArg(typedArg(arg, formal), formal)
+
+ def missingArg(n: Int): Unit = {
+ val pname = methodType.paramNames(n)
+ fail(
+ if (pname contains '$') s"not enough arguments for $methString"
+ else s"missing argument for parameter $pname of $methString")
+ }
+
+ // Try to fill the hole with the default getter; the function is lifted
+ // first so prefix evaluation order stays correct.
+ def tryDefault(n: Int, args1: List[Arg]): Unit = {
+ liftFun()
+ val getter = findDefaultGetter(n + numArgs(normalizedFun))
+ if (getter.isEmpty) missingArg(n)
+ else {
+ addTyped(treeToArg(spliceMeth(getter withPos appPos, normalizedFun)), formal)
+ matchArgs(args1, formals1, n + 1)
+ }
+ }
+
+ if (formal.isRepeatedParam)
+ args match {
+ case arg :: Nil if isVarArg(arg) =>
+ addTyped(arg, formal)
+ case _ =>
+ val elemFormal = formal.widenExpr.argTypesLo.head
+ // If harmonizing changed the arguments, re-type them under the
+ // constraint as it was before the first typing pass.
+ val origConstraint = ctx.typerState.constraint
+ var typedArgs = args.map(typedArg(_, elemFormal))
+ val harmonizedArgs = harmonizeArgs(typedArgs)
+ if (harmonizedArgs ne typedArgs) {
+ ctx.typerState.constraint = origConstraint
+ typedArgs = harmonizedArgs
+ }
+ typedArgs.foreach(addArg(_, elemFormal))
+ makeVarArg(args.length, elemFormal)
+ }
+ else args match {
+ case EmptyTree :: args1 =>
+ tryDefault(n, args1)
+ case arg :: args1 =>
+ addTyped(arg, formal)
+ matchArgs(args1, formals1, n + 1)
+ case nil =>
+ tryDefault(n, args)
+ }
+
+ case nil =>
+ args match {
+ case arg :: args1 => fail(s"too many arguments for $methString", arg)
+ case nil =>
+ }
+ }
+ }
+ }
+
+ /** Subclass of Application for the cases where we are interested only
+ * in a "can/cannot apply" answer, without needing to construct trees or
+ * issue error messages.
+ */
+ abstract class TestApplication[Arg](methRef: TermRef, funType: Type, args: List[Arg], resultType: Type)(implicit ctx: Context)
+ extends Application[Arg](methRef, funType, args, resultType) {
+ type TypedArg = Arg
+ type Result = Unit
+
+ /** The type of the given argument */
+ protected def argType(arg: Arg, formal: Type): Type
+
+ // Arguments are used as-is; only their types matter for applicability.
+ def typedArg(arg: Arg, formal: Type): Arg = arg
+ def addArg(arg: TypedArg, formal: Type) =
+ ok = ok & isCompatible(argType(arg, formal), formal)
+ def makeVarArg(n: Int, elemFormal: Type) = {}
+ // Failures are silent: only the final ok / not-ok answer is reported.
+ def fail(msg: => Message, arg: Arg) =
+ ok = false
+ def fail(msg: => Message) =
+ ok = false
+ def appPos = NoPosition
+ lazy val normalizedFun = ref(methRef)
+ init()
+ }
+
+ /** Subclass of Application for applicability tests with type arguments and value
+ * argument trees.
+ */
+ class ApplicableToTrees(methRef: TermRef, targs: List[Type], args: List[Tree], resultType: Type)(implicit ctx: Context)
+ extends TestApplication(methRef, methRef.widen.appliedTo(targs), args, resultType) {
+ def argType(arg: Tree, formal: Type): Type = normalize(arg.tpe, formal)
+ def treeToArg(arg: Tree): Tree = arg
+ def isVarArg(arg: Tree): Boolean = tpd.isWildcardStarArg(arg)
+ def harmonizeArgs(args: List[Tree]) = harmonize(args)
+ }
+
+ /** Subclass of Application for applicability tests with type arguments and value
+ * argument trees.
+ */
+ class ApplicableToTreesDirectly(methRef: TermRef, targs: List[Type], args: List[Tree], resultType: Type)(implicit ctx: Context) extends ApplicableToTrees(methRef, targs, args, resultType)(ctx) {
+ // Unlike the parent, requires plain subtyping of each argument type rather
+ // than the looser `isCompatible` check.
+ override def addArg(arg: TypedArg, formal: Type) =
+ ok = ok & (argType(arg, formal) <:< formal)
+ }
+
+ /** Subclass of Application for applicability tests with value argument types. */
+ class ApplicableToTypes(methRef: TermRef, args: List[Type], resultType: Type)(implicit ctx: Context)
+ extends TestApplication(methRef, methRef, args, resultType) {
+ // Arguments are already types, so the argument type is the argument itself.
+ def argType(arg: Type, formal: Type): Type = arg
+ def treeToArg(arg: Tree): Type = arg.tpe
+ def isVarArg(arg: Type): Boolean = arg.isRepeatedParam
+ def harmonizeArgs(args: List[Type]) = harmonizeTypes(args)
+ }
+
+ /** Subclass of Application for type checking an Apply node, where
+ * types of arguments are either known or unknown.
+ */
+ abstract class TypedApply[T >: Untyped](
+ app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Trees.Tree[T]], resultType: Type)(implicit ctx: Context)
+ extends Application(methRef, fun.tpe, args, resultType) {
+ type TypedArg = Tree
+ def isVarArg(arg: Trees.Tree[T]): Boolean = untpd.isWildcardStarArg(arg)
+ // Buffer of adapted argument trees, filled by addArg/makeVarArg during init().
+ private var typedArgBuf = new mutable.ListBuffer[Tree]
+ // Prefix definitions produced by lifting; null until liftFun() runs.
+ private var liftedDefs: mutable.ListBuffer[Tree] = null
+ private var myNormalizedFun: Tree = fun
+ init()
+
+ // Adapt the typed argument to the (widened) formal type and buffer it.
+ def addArg(arg: Tree, formal: Type): Unit =
+ typedArgBuf += adaptInterpolated(arg, formal.widenExpr, EmptyTree)
+
+ def makeVarArg(n: Int, elemFormal: Type): Unit = {
+ val args = typedArgBuf.takeRight(n).toList
+ typedArgBuf.trimEnd(n)
+ val elemtpt = TypeTree(elemFormal)
+ // Java varargs methods get a JavaSeqLiteral, Scala ones a SeqLiteral.
+ val seqLit =
+ if (methodType.isJava) JavaSeqLiteral(args, elemtpt)
+ else SeqLiteral(args, elemtpt)
+ typedArgBuf += seqToRepeated(seqLit)
+ }
+
+ def harmonizeArgs(args: List[TypedArg]) = harmonize(args)
+
+ override def appPos = app.pos
+
+ def fail(msg: => Message, arg: Trees.Tree[T]) = {
+ ctx.error(msg, arg.pos)
+ ok = false
+ }
+
+ def fail(msg: => Message) = {
+ ctx.error(msg, app.pos)
+ ok = false
+ }
+
+ def normalizedFun = myNormalizedFun
+
+ // Lift non-idempotent parts of the function into liftedDefs (at most once).
+ override def liftFun(): Unit =
+ if (liftedDefs == null) {
+ liftedDefs = new mutable.ListBuffer[Tree]
+ myNormalizedFun = liftApp(liftedDefs, myNormalizedFun)
+ }
+
+ /** The index of the first difference between lists of trees `xs` and `ys`,
+ * where `EmptyTree`s in the second list are skipped.
+ * -1 if there are no differences.
+ */
+ private def firstDiff[T <: Trees.Tree[_]](xs: List[T], ys: List[T], n: Int = 0): Int = xs match {
+ case x :: xs1 =>
+ ys match {
+ case EmptyTree :: ys1 => firstDiff(xs1, ys1, n)
+ case y :: ys1 => if (x ne y) n else firstDiff(xs1, ys1, n + 1)
+ case nil => n
+ }
+ case nil =>
+ ys match {
+ case EmptyTree :: ys1 => firstDiff(xs, ys1, n)
+ case y :: ys1 => n
+ case nil => -1
+ }
+ }
+ private def sameSeq[T <: Trees.Tree[_]](xs: List[T], ys: List[T]): Boolean = firstDiff(xs, ys) < 0
+
+ /** The final typed application tree, assembled after init() has processed
+ * all arguments; wrapped in a block of lifted definitions when lifting
+ * occurred.
+ */
+ val result = {
+ var typedArgs = typedArgBuf.toList
+ def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later
+ val app1 =
+ if (!success) app0.withType(ErrorType)
+ else {
+ if (!sameSeq(args, orderedArgs)) {
+ // need to lift arguments to maintain evaluation order in the
+ // presence of argument reorderings.
+ liftFun()
+ val eqSuffixLength = firstDiff(app.args.reverse, orderedArgs.reverse)
+ val (liftable, rest) = typedArgs splitAt (typedArgs.length - eqSuffixLength)
+ typedArgs = liftArgs(liftedDefs, methType, liftable) ++ rest
+ }
+ if (sameSeq(typedArgs, args)) // trick to cut down on tree copying
+ typedArgs = args.asInstanceOf[List[Tree]]
+ assignType(app0, normalizedFun, typedArgs)
+ }
+ wrapDefs(liftedDefs, app1)
+ }
+ }
+
+ /** Subclass of Application for type checking an Apply node with untyped arguments. */
+ class ApplyToUntyped(app: untpd.Apply, fun: Tree, methRef: TermRef, proto: FunProto, resultType: Type)(implicit ctx: Context)
+ extends TypedApply(app, fun, methRef, proto.args, resultType) {
+ // Delegate argument typing to the prototype, which caches typed arguments.
+ def typedArg(arg: untpd.Tree, formal: Type): TypedArg = proto.typedArg(arg, formal.widenExpr)
+ def treeToArg(arg: Tree): untpd.Tree = untpd.TypedSplice(arg)
+ }
+
+ /** Subclass of Application for type checking an Apply node with typed arguments. */
+ class ApplyToTyped(app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Tree], resultType: Type)(implicit ctx: Context)
+ extends TypedApply[Type](app, fun, methRef, args, resultType) {
+ // Dotty deviation: Dotc infers Untyped for the supercall. This seems to be according to the rules
+ // (of both Scala and Dotty). Untyped is legal, and a subtype of Typed, whereas TypeApply
+ // is invariant in the type parameter, so the minimal type should be inferred. But then typedArg does
+ // not match the abstract method in Application and an abstract class error results.
+ // Arguments are already typed trees: pass them through unchanged.
+ def typedArg(arg: tpd.Tree, formal: Type): TypedArg = arg
+ def treeToArg(arg: Tree): Tree = arg
+ }
+
+ /** If `app` is a `this(...)` constructor call, the this-call argument context,
+ * otherwise the current context.
+ */
+ def argCtx(app: untpd.Tree)(implicit ctx: Context): Context =
+ // Self-constructor-call arguments are typed in a special context.
+ if (untpd.isSelfConstrCall(app)) ctx.thisCallArgContext else ctx
+
+ /** Typecheck an application node against expected type `pt`.
+ * Handles op-assignments (`e += args`) as a fallback, and checks `==`/`!=`
+ * applications with `checkCanEqual`.
+ */
+ def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
+
+ // The normal path: type the function part against a FunProto built from
+ // the untyped arguments, then type the application itself.
+ def realApply(implicit ctx: Context): Tree = track("realApply") {
+ val originalProto = new FunProto(tree.args, IgnoredProto(pt), this)(argCtx(tree))
+ val fun1 = typedExpr(tree.fun, originalProto)
+
+ // Warning: The following lines are dirty and fragile. We record that auto-tupling was demanded as
+ // a side effect in adapt. If it was, we assume the tupled proto-type in the rest of the application,
+ // until, possibly, we have to fall back to insert an implicit on the qualifier.
+ // This crucially relies on he fact that `proto` is used only in a single call of `adapt`,
+ // otherwise we would get possible cross-talk between different `adapt` calls using the same
+ // prototype. A cleaner alternative would be to return a modified prototype from `adapt` together with
+ // a modified tree but this would be more convoluted and less efficient.
+ val proto = if (originalProto.isTupled) originalProto.tupled else originalProto
+
+ // If some of the application's arguments are function literals without explicitly declared
+ // parameter types, relate the normalized result type of the application with the
+ // expected type through `constrainResult`. This can add more constraints which
+ // help sharpen the inferred parameter types for the argument function literal(s).
+ // This tweak is needed to make i1378 compile.
+ if (tree.args.exists(untpd.isFunctionWithUnknownParamType(_)))
+ if (!constrainResult(fun1.tpe.widen, proto.derivedFunProto(resultType = pt)))
+ typr.println(i"result failure for $tree with type ${fun1.tpe.widen}, expected = $pt")
+
+ /** Type application where arguments come from prototype, and no implicits are inserted */
+ def simpleApply(fun1: Tree, proto: FunProto)(implicit ctx: Context): Tree =
+ methPart(fun1).tpe match {
+ case funRef: TermRef =>
+ val app =
+ if (proto.allArgTypesAreCurrent())
+ new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt)
+ else
+ new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx(tree))
+ convertNewGenericArray(ConstFold(app.result))
+ case _ =>
+ handleUnexpectedFunType(tree, fun1)
+ }
+
+ /** Try same application with an implicit inserted around the qualifier of the function
+ * part. Return an optional value to indicate success.
+ */
+ def tryWithImplicitOnQualifier(fun1: Tree, proto: FunProto)(implicit ctx: Context): Option[Tree] =
+ tryInsertImplicitOnQualifier(fun1, proto) flatMap { fun2 =>
+ tryEither {
+ implicit ctx => Some(simpleApply(fun2, proto)): Option[Tree]
+ } {
+ (_, _) => None
+ }
+ }
+
+ fun1.tpe match {
+ case ErrorType => untpd.cpy.Apply(tree)(fun1, tree.args).withType(ErrorType)
+ case TryDynamicCallType => typedDynamicApply(tree, pt)
+ case _ =>
+ tryEither {
+ implicit ctx => simpleApply(fun1, proto)
+ } {
+ (failedVal, failedState) =>
+ def fail = { failedState.commit(); failedVal }
+ // Try once with original prototype and once (if different) with tupled one.
+ // The reason we need to try both is that the decision whether to use tupled
+ // or not was already taken but might have to be revised when an implicit
+ // is inserted on the qualifier.
+ tryWithImplicitOnQualifier(fun1, originalProto).getOrElse(
+ if (proto eq originalProto) fail
+ else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail))
+ }
+ }
+ }
+
+ /** Convert expression like
+ *
+ * e += (args)
+ *
+ * where the lifted-for-assignment version of e is { val xs = es; e' } to
+ *
+ * { val xs = es; e' = e' + args }
+ */
+ def typedOpAssign: Tree = track("typedOpAssign") {
+ val Apply(Select(lhs, name), rhss) = tree
+ val lhs1 = typedExpr(lhs)
+ val liftedDefs = new mutable.ListBuffer[Tree]
+ val lhs2 = untpd.TypedSplice(liftAssigned(liftedDefs, lhs1))
+ val assign = untpd.Assign(lhs2, untpd.Apply(untpd.Select(lhs2, name.init), rhss))
+ wrapDefs(liftedDefs, typed(assign))
+ }
+
+ // Op-assignments first try the plain application; only if that fails is the
+ // assignment expansion attempted, and only if both fail is the original
+ // failure reported.
+ if (untpd.isOpAssign(tree))
+ tryEither {
+ implicit ctx => realApply
+ } { (failedVal, failedState) =>
+ tryEither {
+ implicit ctx => typedOpAssign
+ } { (_, _) =>
+ failedState.commit()
+ failedVal
+ }
+ }
+ else {
+ val app = realApply
+ app match {
+ case Apply(fn @ Select(left, _), right :: Nil) if fn.hasType =>
+ // Universal equality gets an extra eligibility check.
+ val op = fn.symbol
+ if (op == defn.Any_== || op == defn.Any_!=)
+ checkCanEqual(left.tpe.widen, right.tpe.widen, app.pos)
+ case _ =>
+ }
+ app
+ }
+ }
+
+ /** Overridden in ReTyper to handle primitive operations that can be generated after erasure */
+ // Here (during normal typing) an unexpected function type is a compiler bug.
+ protected def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(implicit ctx: Context): Tree =
+ throw new Error(i"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}")
+
+ /** Typecheck the named type arguments of a TypeApply, keeping the NamedArg
+ * wrappers (with the typed argument spliced in) so the names stay visible.
+ */
+ def typedNamedArgs(args: List[untpd.Tree])(implicit ctx: Context) =
+ for (arg @ NamedArg(id, argtpt) <- args) yield {
+ val argtpt1 = typedType(argtpt)
+ cpy.NamedArg(arg)(id, argtpt1).withType(argtpt1.tpe)
+ }
+
+ /** Typecheck a type application `fun[args]` against expected type `pt`,
+ * falling back to a dynamic call when the function resolves to one.
+ */
+ def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context): Tree = track("typedTypeApply") {
+ val isNamed = hasNamedArg(tree.args)
+ val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_))
+ val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt))
+ typedFn.tpe.widen match {
+ case pt: PolyType =>
+ // classOf[T] requires its argument to be a (non-trait) class type.
+ if (typedArgs.length <= pt.paramBounds.length && !isNamed)
+ if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) {
+ val arg = typedArgs.head
+ checkClassType(arg.tpe, arg.pos, traitReq = false, stablePrefixReq = false)
+ }
+ case _ =>
+ }
+ def tryDynamicTypeApply(): Tree = typedFn match {
+ case typedFn: Select if !pt.isInstanceOf[FunProto] => typedDynamicSelect(typedFn, typedArgs, pt)
+ case _ => tree.withType(TryDynamicCallType)
+ }
+ if (typedFn.tpe eq TryDynamicCallType) tryDynamicTypeApply()
+ else assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)
+ }
+
+ /** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray.
+ * It is performed during typer as creation of generic arrays needs a classTag.
+ * we rely on implicit search to find one.
+ */
+ def convertNewGenericArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
+ case Apply(TypeApply(tycon, targs@(targ :: Nil)), args) if tycon.symbol == defn.ArrayConstructor =>
+ // Force the element type to be fully defined before deciding.
+ fullyDefinedType(tree.tpe, "array", tree.pos)
+
+ def newGenericArrayCall =
+ ref(defn.DottyArraysModule)
+ .select(defn.newGenericArrayMethod).withPos(tree.pos)
+ .appliedToTypeTrees(targs).appliedToArgs(args)
+
+ if (TypeErasure.isUnboundedGeneric(targ.tpe))
+ newGenericArrayCall
+ else tree
+ case _ =>
+ tree
+ }
+
+ /** Type-check an extractor pattern `qual(args)` against selector type `selType`.
+ * Resolves `qual.unapply` (or `unapplySeq`), possibly following a type alias
+ * to a class companion, computes the pattern's own type from the unapply's
+ * argument type, and types the sub-patterns against the extracted argument types.
+ */
+ def typedUnApply(tree: untpd.Apply, selType: Type)(implicit ctx: Context): Tree = track("typedUnApply") {
+ val Apply(qual, args) = tree
+
+ def notAnExtractor(tree: Tree) =
+ errorTree(tree, s"${qual.show} cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method")
+
+ /** If this is a term ref tree, try to typecheck with its type name.
+ * If this refers to a type alias, follow the alias, and if
+ * one finds a class, reference the class companion module.
+ */
+ def followTypeAlias(tree: untpd.Tree): untpd.Tree = {
+ tree match {
+ case tree: untpd.RefTree =>
+ val ttree = typedType(untpd.rename(tree, tree.name.toTypeName))
+ ttree.tpe match {
+ case alias: TypeRef if alias.info.isAlias =>
+ companionRef(alias) match {
+ case companion: TermRef => return untpd.ref(companion) withPos tree.pos
+ case _ =>
+ }
+ case _ =>
+ }
+ case _ =>
+ }
+ untpd.EmptyTree
+ }
+
+ /** A typed qual.unapply or qual.unapplySeq tree, if this typechecks.
+ * Otherwise fallBack with (maltyped) qual.unapply as argument
+ * Note: requires special handling for overloaded occurrences of
+ * unapply or unapplySeq. We first try to find a non-overloaded
+ * method which matches any type. If that fails, we try to find an
+ * overloaded variant which matches one of the argument types.
+ * In fact, overloaded unapply's are problematic because a non-
+ * overloaded unapply does *not* need to be applicable to its argument
+ * whereas overloaded variants need to have a conforming variant.
+ */
+ def trySelectUnapply(qual: untpd.Tree)(fallBack: Tree => Tree): Tree = {
+ val genericProto = new UnapplyFunProto(WildcardType, this)
+ def specificProto = new UnapplyFunProto(selType, this)
+ // try first for non-overloaded, then for overloaded occurrences
+ def tryWithName(name: TermName)(fallBack: Tree => Tree)(implicit ctx: Context): Tree =
+ tryEither {
+ implicit ctx => typedExpr(untpd.Select(qual, name), specificProto)
+ } {
+ (sel, _) =>
+ tryEither {
+ implicit ctx => typedExpr(untpd.Select(qual, name), genericProto)
+ } {
+ (_, _) => fallBack(sel)
+ }
+ }
+ // try first for unapply, then for unapplySeq
+ tryWithName(nme.unapply) {
+ sel => tryWithName(nme.unapplySeq)(_ => fallBack(sel)) // for backwards compatibility; will be dropped
+ }
+ }
+
+ /** Produce a typed qual.unapply or qual.unapplySeq tree, or
+ * else if this fails follow a type alias and try again.
+ */
+ val unapplyFn = trySelectUnapply(qual) { sel =>
+ val qual1 = followTypeAlias(qual)
+ if (qual1.isEmpty) notAnExtractor(sel)
+ else trySelectUnapply(qual1)(_ => notAnExtractor(sel))
+ }
+
+ // True if the unapply comes from a Scala 2 classfile; used below to relax
+ // the uniqueness requirement on GADT instantiation (see SI-7952 comment).
+ def fromScala2x = unapplyFn.symbol.exists && (unapplyFn.symbol.owner is Scala2x)
+
+ /** Is `subtp` a subtype of `tp` or of some generalization of `tp`?
+ * The generalizations of a type T are the smallest set G such that
+ *
+ * - T is in G
+ * - If a typeref R in G represents a class or trait, R's superclass is in G.
+ * - If a type proxy P is not a reference to a class, P's supertype is in G
+ */
+ def isSubTypeOfParent(subtp: Type, tp: Type)(implicit ctx: Context): Boolean =
+ if (subtp <:< tp) true
+ else tp match {
+ case tp: TypeRef if tp.symbol.isClass => isSubTypeOfParent(subtp, tp.firstParent)
+ case tp: TypeProxy => isSubTypeOfParent(subtp, tp.superType)
+ case _ => false
+ }
+
+ unapplyFn.tpe.widen match {
+ case mt: MethodType if mt.paramTypes.length == 1 =>
+ val unapplyArgType = mt.paramTypes.head
+ unapp.println(i"unapp arg tpe = $unapplyArgType, pt = $selType")
+ // The pattern's own type: the selector type if it conforms to the
+ // unapply argument type, else the (GADT-maximized) argument type,
+ // else an error type.
+ val ownType =
+ if (selType <:< unapplyArgType) {
+ unapp.println(i"case 1 $unapplyArgType ${ctx.typerState.constraint}")
+ selType
+ } else if (isSubTypeOfParent(unapplyArgType, selType)(ctx.addMode(Mode.GADTflexible))) {
+ maximizeType(unapplyArgType) match {
+ case Some(tvar) =>
+ def msg =
+ ex"""There is no best instantiation of pattern type $unapplyArgType
+ |that makes it a subtype of selector type $selType.
+ |Non-variant type variable ${tvar.origin} cannot be uniquely instantiated."""
+ if (fromScala2x) {
+ // We can't issue an error here, because in Scala 2, ::[B] is invariant
+ // whereas List[+T] is covariant. According to the strict rule, a pattern
+ // match of a List[C] against a case x :: xs is illegal, because
+ // B cannot be uniquely instantiated. Of course :: should have been
+ // covariant in the first place, but in the Scala libraries it isn't.
+ // So for now we allow these kinds of patterns, even though they
+ // can open unsoundness holes. See SI-7952 for an example of the hole this opens.
+ if (ctx.settings.verbose.value) ctx.warning(msg, tree.pos)
+ } else {
+ unapp.println(s" ${unapplyFn.symbol.owner} ${unapplyFn.symbol.owner is Scala2x}")
+ ctx.strictWarning(msg, tree.pos)
+ }
+ case _ =>
+ }
+ unapp.println(i"case 2 $unapplyArgType ${ctx.typerState.constraint}")
+ unapplyArgType
+ } else {
+ unapp.println("Neither sub nor super")
+ unapp.println(TypeComparer.explained(implicit ctx => unapplyArgType <:< selType))
+ errorType(
+ ex"Pattern type $unapplyArgType is neither a subtype nor a supertype of selector type $selType",
+ tree.pos)
+ }
+
+ // Apply the unapply to a dummy argument of the computed type so that
+ // implicit arguments (if any) are resolved; recover them from the result.
+ val dummyArg = dummyTreeOfType(ownType)
+ val unapplyApp = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil)))
+ val unapplyImplicits = unapplyApp match {
+ case Apply(Apply(unapply, `dummyArg` :: Nil), args2) => assert(args2.nonEmpty); args2
+ case Apply(unapply, `dummyArg` :: Nil) => Nil
+ }
+
+ var argTypes = unapplyArgs(unapplyApp.tpe, unapplyFn, args, tree.pos)
+ for (argType <- argTypes) assert(!argType.isInstanceOf[TypeBounds], unapplyApp.tpe.show)
+ // Handle vararg extractors and auto-tupling of multiple patterns
+ // against a single extracted value.
+ val bunchedArgs = argTypes match {
+ case argType :: Nil =>
+ if (argType.isRepeatedParam) untpd.SeqLiteral(args, untpd.TypeTree()) :: Nil
+ else if (args.lengthCompare(1) > 0 && ctx.canAutoTuple) untpd.Tuple(args) :: Nil
+ else args
+ case _ => args
+ }
+ if (argTypes.length != bunchedArgs.length) {
+ ctx.error(em"wrong number of argument patterns for $qual; expected: ($argTypes%, %)", tree.pos)
+ argTypes = argTypes.take(args.length) ++
+ List.fill(argTypes.length - args.length)(WildcardType)
+ }
+ val unapplyPatterns = (bunchedArgs, argTypes).zipped map (typed(_, _))
+ val result = assignType(cpy.UnApply(tree)(unapplyFn, unapplyImplicits, unapplyPatterns), ownType)
+ unapp.println(s"unapply patterns = $unapplyPatterns")
+ if ((ownType eq selType) || ownType.isError) result
+ else Typed(result, TypeTree(ownType))
+ case tp =>
+ val unapplyErr = if (tp.isError) unapplyFn else notAnExtractor(unapplyFn)
+ val typedArgsErr = args mapconserve (typed(_, defn.AnyType))
+ cpy.UnApply(tree)(unapplyErr, Nil, typedArgsErr) withType ErrorType
+ }
+ }
+
+ /** A typed unapply hook; can be overridden by any re-typers running between
+ * the frontend and the pattern matcher. The base typer never type-checks an
+ * already-typed UnApply node, hence the unconditional failure here.
+ */
+ def typedUnApply(tree: untpd.UnApply, selType: Type)(implicit ctx: Context): UnApply =
+ throw new UnsupportedOperationException("cannot type check an UnApply node")
+
+ /** Is given method reference applicable to type arguments `targs` and argument trees `args`?
+ * @param resultType The expected result type of the application
+ */
+ def isApplicable(methRef: TermRef, targs: List[Type], args: List[Tree], resultType: Type)(implicit ctx: Context): Boolean = {
+ // Run in an exploratory typer state so constraints from the trial
+ // application do not leak into the enclosing context.
+ val nestedContext = ctx.fresh.setExploreTyperState
+ new ApplicableToTrees(methRef, targs, args, resultType)(nestedContext).success
+ }
+
+ /** Is given method reference applicable to type arguments `targs` and argument trees `args` without inferring views?
+ * @param resultType The expected result type of the application
+ */
+ def isDirectlyApplicable(methRef: TermRef, targs: List[Type], args: List[Tree], resultType: Type)(implicit ctx: Context): Boolean = {
+ // Exploratory typer state: trial constraints are discarded afterwards.
+ val nestedContext = ctx.fresh.setExploreTyperState
+ new ApplicableToTreesDirectly(methRef, targs, args, resultType)(nestedContext).success
+ }
+
+ /** Is given method reference applicable to argument types `args`?
+ * @param resultType The expected result type of the application
+ */
+ def isApplicable(methRef: TermRef, args: List[Type], resultType: Type)(implicit ctx: Context): Boolean = {
+ // Exploratory typer state: trial constraints are discarded afterwards.
+ val nestedContext = ctx.fresh.setExploreTyperState
+ new ApplicableToTypes(methRef, args, resultType)(nestedContext).success
+ }
+
+ /** Is given type applicable to type arguments `targs` and argument trees `args`,
+ * possibly after inserting an `apply`?
+ * @param resultType The expected result type of the application
+ */
+ def isApplicable(tp: Type, targs: List[Type], args: List[Tree], resultType: Type)(implicit ctx: Context): Boolean =
+ onMethod(tp, isApplicable(_, targs, args, resultType))
+
+ /** Is given type applicable to argument types `args`, possibly after inserting an `apply`?
+ * @param resultType The expected result type of the application
+ */
+ def isApplicable(tp: Type, args: List[Type], resultType: Type)(implicit ctx: Context): Boolean =
+ onMethod(tp, isApplicable(_, args, resultType))
+
+ /** Apply predicate `p` to the method reference(s) derivable from `tp`:
+ * either `tp` itself if it is already a methodic TermRef, a narrowed
+ * version of a bare methodic type, or any `apply` member alternatives.
+ */
+ private def onMethod(tp: Type, p: TermRef => Boolean)(implicit ctx: Context): Boolean = tp match {
+ case methRef: TermRef if methRef.widenSingleton.isInstanceOf[MethodicType] =>
+ p(methRef)
+ case mt: MethodicType =>
+ p(mt.narrow)
+ case _ =>
+ tp.member(nme.apply).hasAltWith(d => p(TermRef(tp, nme.apply, d)))
+ }
+
+ /** In a set of overloaded applicable alternatives, is `alt1` at least as good as
+ * `alt2`? `alt1` and `alt2` are non-overloaded references.
+ */
+ def isAsGood(alt1: TermRef, alt2: TermRef)(implicit ctx: Context): Boolean = track("isAsGood") { ctx.traceIndented(i"isAsGood($alt1, $alt2)", overload) {
+
+ assert(alt1 ne alt2)
+
+ /** Is class or module class `sym1` derived from class or module class `sym2`?
+ * Module classes also inherit the relationship from their companions.
+ */
+ def isDerived(sym1: Symbol, sym2: Symbol): Boolean =
+ if (sym1 isSubClass sym2) true
+ else if (sym2 is Module) isDerived(sym1, sym2.companionClass)
+ else (sym1 is Module) && isDerived(sym1.companionClass, sym2)
+
+ /** Is alternative `alt1` with type `tp1` as specific as alternative
+ * `alt2` with type `tp2` ?
+ *
+ * 1. A method `alt1` of type (p1: T1, ..., pn: Tn)U is as specific as `alt2`
+ * if `alt2` is applicable to arguments (p1, ..., pn) of types T1,...,Tn
+ * or if `alt1` is nullary.
+ * 2. A polymorphic member of type [a1 >: L1 <: U1, ..., an >: Ln <: Un]T is as
+ * specific as `alt2` of type `tp2` if T is as specific as `tp2` under the
+ * assumption that for i = 1,...,n each ai is an abstract type name bounded
+ * from below by Li and from above by Ui.
+ * 3. A member of any other type `tp1` is:
+ * a. always as specific as a method or a polymorphic method.
+ * b. as specific as a member of any other type `tp2` if `tp1` is compatible
+ * with `tp2`.
+ */
+ def isAsSpecific(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = ctx.traceIndented(i"isAsSpecific $tp1 $tp2", overload) { tp1 match {
+ case tp1: MethodType => // (1)
+ // When both alternatives are vararg methods, compare the last
+ // parameter as its element type rather than as a repeated type.
+ def repeatedToSingle(tp: Type): Type = tp match {
+ case tp @ ExprType(tp1) => tp.derivedExprType(repeatedToSingle(tp1))
+ case _ => if (tp.isRepeatedParam) tp.argTypesHi.head else tp
+ }
+ val formals1 =
+ if (tp1.isVarArgsMethod && tp2.isVarArgsMethod) tp1.paramTypes map repeatedToSingle
+ else tp1.paramTypes
+ isApplicable(alt2, formals1, WildcardType) ||
+ tp1.paramTypes.isEmpty && tp2.isInstanceOf[MethodOrPoly]
+ case tp1: PolyType => // (2)
+ // Skolemize type parameters as abstract types with their declared bounds.
+ val tparams = ctx.newTypeParams(alt1.symbol, tp1.paramNames, EmptyFlags, tp1.instantiateBounds)
+ isAsSpecific(alt1, tp1.instantiate(tparams map (_.typeRef)), alt2, tp2)
+ case _ => // (3)
+ tp2 match {
+ case tp2: MethodType => true // (3a)
+ case tp2: PolyType if tp2.isPolymorphicMethodType => true // (3a)
+ case tp2: PolyType => // (3b)
+ val nestedCtx = ctx.fresh.setExploreTyperState
+
+ {
+ implicit val ctx: Context = nestedCtx
+ isAsSpecificValueType(tp1, constrained(tp2).resultType)
+ }
+ case _ => // (3b)
+ isAsSpecificValueType(tp1, tp2)
+ }
+ }}
+
+ /** Test whether value type `tp1` is as specific as value type `tp2`.
+ * Let's abbreviate this to `tp1 <:s tp2`.
+ * Previously, `<:s` was the same as `<:`. This behavior is still
+ * available under mode `Mode.OldOverloadingResolution`. The new behavior
+ * is different, however. Here, `T <:s U` iff
+ *
+ * flip(T) <: flip(U)
+ *
+ * where `flip` changes top-level contravariant type aliases to covariant ones.
+ * Intuitively `<:s` means subtyping `<:`, except that all top-level arguments
+ * to contravariant parameters are compared as if they were covariant. E.g. given class
+ *
+ * class Cmp[-X]
+ *
+ * `Cmp[T] <:s Cmp[U]` if `T <: U`. On the other hand, nested occurrences
+ * of parameters are not affected.
+ * So `T <: U` would imply `List[Cmp[U]] <:s List[Cmp[T]]`, as usual.
+ *
+ * This relation might seem strange, but it models closely what happens for methods.
+ * Indeed, if we integrate the existing rules for methods into `<:s` we have now that
+ *
+ * (T)R <:s (U)R
+ *
+ * iff
+ *
+ * T => R <:s U => R
+ */
+ def isAsSpecificValueType(tp1: Type, tp2: Type)(implicit ctx: Context) =
+ if (ctx.mode.is(Mode.OldOverloadingResolution))
+ isCompatible(tp1, tp2)
+ else {
+ // Flip top-level contravariant type aliases to covariant (variance 1)
+ // before the compatibility test; see doc comment above.
+ val flip = new TypeMap {
+ def apply(t: Type) = t match {
+ case t: TypeAlias if variance > 0 && t.variance < 0 => t.derivedTypeAlias(t.alias, 1)
+ case t: TypeBounds => t
+ case _ => mapOver(t)
+ }
+ }
+ isCompatible(flip(tp1), flip(tp2))
+ }
+
+ /** Drop any implicit parameter section */
+ def stripImplicit(tp: Type): Type = tp match {
+ case mt: ImplicitMethodType if !mt.isDependent =>
+ mt.resultType
+ // todo: make sure implicit method types are not dependent?
+ // but check test case in /tests/pos/depmet_implicit_chaining_zw.scala
+ case pt: PolyType =>
+ pt.derivedPolyType(pt.paramNames, pt.paramBounds, stripImplicit(pt.resultType))
+ case _ =>
+ tp
+ }
+
+ val owner1 = if (alt1.symbol.exists) alt1.symbol.owner else NoSymbol
+ val owner2 = if (alt2.symbol.exists) alt2.symbol.owner else NoSymbol
+ val tp1 = stripImplicit(alt1.widen)
+ val tp2 = stripImplicit(alt2.widen)
+
+ // All four tests are lazy `def`s so the short-circuiting logic below
+ // evaluates only what it needs (see probability analysis).
+ def winsOwner1 = isDerived(owner1, owner2)
+ def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2)
+ def winsOwner2 = isDerived(owner2, owner1)
+ def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1)
+
+ overload.println(i"isAsGood($alt1, $alt2)? $tp1 $tp2 $winsOwner1 $winsType1 $winsOwner2 $winsType2")
+
+ // Assume the following probabilities:
+ //
+ // P(winsOwnerX) = 2/3
+ // P(winsTypeX) = 1/3
+ //
+ // Then the call probabilities of the 4 basic operations are as follows:
+ //
+ // winsOwner1: 1/1
+ // winsOwner2: 1/1
+ // winsType1 : 7/9
+ // winsType2 : 4/9
+
+ if (winsOwner1) /* 6/9 */ !winsOwner2 || /* 4/9 */ winsType1 || /* 8/27 */ !winsType2
+ else if (winsOwner2) /* 2/9 */ winsType1 && /* 2/27 */ !winsType2
+ else /* 1/9 */ winsType1 || /* 2/27 */ !winsType2
+ }}
+
+ /** Narrow `alts` to those alternatives that are maximal with respect to
+ * `isAsGood`: first find a best candidate by a linear scan, then keep it
+ * together with every other alternative that is as good as it.
+ */
+ def narrowMostSpecific(alts: List[TermRef])(implicit ctx: Context): List[TermRef] = track("narrowMostSpecific") {
+ alts match {
+ case Nil => alts
+ case _ :: Nil => alts
+ case alt :: alts1 =>
+ def winner(bestSoFar: TermRef, alts: List[TermRef]): TermRef = alts match {
+ case alt :: alts1 =>
+ winner(if (isAsGood(alt, bestSoFar)) alt else bestSoFar, alts1)
+ case nil =>
+ bestSoFar
+ }
+ val best = winner(alt, alts1)
+ def asGood(alts: List[TermRef]): List[TermRef] = alts match {
+ case alt :: alts1 =>
+ if ((alt eq best) || !isAsGood(alt, best)) asGood(alts1)
+ else alt :: asGood(alts1)
+ case nil =>
+ Nil
+ }
+ best :: asGood(alts)
+ }
+ }
+
+ /** Resolve overloaded alternative `alts`, given expected type `pt` and
+ * possibly also type argument `targs` that need to be applied to each alternative
+ * to form the method type.
+ * todo: use techniques like for implicits to pick candidates quickly?
+ */
+ def resolveOverloaded(alts: List[TermRef], pt: Type)(implicit ctx: Context): List[TermRef] = track("resolveOverloaded") {
+
+ /** Is `alt` a method or polytype whose result type after the first value parameter
+ * section conforms to the expected type `resultType`? If `resultType`
+ * is a `IgnoredProto`, pick the underlying type instead.
+ */
+ def resultConforms(alt: Type, resultType: Type)(implicit ctx: Context): Boolean = resultType match {
+ case IgnoredProto(ignored) => resultConforms(alt, ignored)
+ case _: ValueType =>
+ alt.widen match {
+ case tp: PolyType => resultConforms(constrained(tp).resultType, resultType)
+ case tp: MethodType => constrainResult(tp.resultType, resultType)
+ case _ => true
+ }
+ case _ => true
+ }
+
+ /** If the `chosen` alternative has a result type incompatible with the expected result
+ * type `pt`, run overloading resolution again on all alternatives that do match `pt`.
+ * If the latter succeeds with a single alternative, return it, otherwise
+ * fallback to `chosen`.
+ *
+ * Note this order of events is done for speed. One might be tempted to
+ * preselect alternatives by result type. But it is slower, because it discriminates
+ * less. The idea is when searching for a best solution, as is the case in overloading
+ * resolution, we should first try criteria which are cheap and which have a high
+ * probability of pruning the search. Result type comparisons are neither cheap nor
+ * do they prune much, on average.
+ */
+ def adaptByResult(chosen: TermRef) = {
+ def nestedCtx = ctx.fresh.setExploreTyperState
+ pt match {
+ case pt: FunProto if !resultConforms(chosen, pt.resultType)(nestedCtx) =>
+ alts.filter(alt =>
+ (alt ne chosen) && resultConforms(alt, pt.resultType)(nestedCtx)) match {
+ case Nil => chosen
+ case alt2 :: Nil => alt2
+ case alts2 =>
+ resolveOverloaded(alts2, pt) match {
+ case alt2 :: Nil => alt2
+ case _ => chosen
+ }
+ }
+ case _ => chosen
+ }
+ }
+
+ // First try resolution without implicit conversions; only if that yields
+ // nothing and implicits are enabled, retry with implicits.
+ var found = resolveOverloaded(alts, pt, Nil)(ctx.retractMode(Mode.ImplicitsEnabled))
+ if (found.isEmpty && ctx.mode.is(Mode.ImplicitsEnabled))
+ found = resolveOverloaded(alts, pt, Nil)
+ found match {
+ case alt :: Nil => adaptByResult(alt) :: Nil
+ case _ => found
+ }
+ }
+
+ /** This private version of `resolveOverloaded` does the bulk of the work of
+ * overloading resolution, but does not do result adaptation. It might be
+ * called twice from the public `resolveOverloaded` method, once with
+ * implicits enabled, and once without.
+ */
+ private def resolveOverloaded(alts: List[TermRef], pt: Type, targs: List[Type])(implicit ctx: Context): List[TermRef] = track("resolveOverloaded") {
+
+ def isDetermined(alts: List[TermRef]) = alts.isEmpty || alts.tail.isEmpty
+
+ /** The shape of given tree as a type; cannot handle named arguments. */
+ def typeShape(tree: untpd.Tree): Type = tree match {
+ case untpd.Function(args, body) =>
+ defn.FunctionOf(args map Function.const(defn.AnyType), typeShape(body))
+ case _ =>
+ defn.NothingType
+ }
+
+ /** The shape of given tree as a type; is more expensive than
+ * typeShape but can handle named arguments.
+ */
+ def treeShape(tree: untpd.Tree): Tree = tree match {
+ case NamedArg(name, arg) =>
+ val argShape = treeShape(arg)
+ cpy.NamedArg(tree)(name, argShape).withType(argShape.tpe)
+ case _ =>
+ dummyTreeOfType(typeShape(tree))
+ }
+
+ def narrowByTypes(alts: List[TermRef], argTypes: List[Type], resultType: Type): List[TermRef] =
+ alts filter (isApplicable(_, argTypes, resultType))
+
+ val candidates = pt match {
+ case pt @ FunProto(args, resultType, _) =>
+ val numArgs = args.length
+ val normArgs = args.mapConserve {
+ case Block(Nil, expr) => expr
+ case x => x
+ }
+
+ // Cheap pre-filter: can the alternative possibly accept `numArgs`
+ // arguments, taking varargs and default parameters into account?
+ def sizeFits(alt: TermRef, tp: Type): Boolean = tp match {
+ case tp: PolyType => sizeFits(alt, tp.resultType)
+ case MethodType(_, ptypes) =>
+ val numParams = ptypes.length
+ def isVarArgs = ptypes.nonEmpty && ptypes.last.isRepeatedParam
+ def hasDefault = alt.symbol.hasDefaultParams
+ if (numParams == numArgs) true
+ else if (numParams < numArgs) isVarArgs
+ else if (numParams > numArgs + 1) hasDefault
+ else isVarArgs || hasDefault
+ case _ =>
+ numArgs == 0
+ }
+
+ def narrowBySize(alts: List[TermRef]): List[TermRef] =
+ alts filter (alt => sizeFits(alt, alt.widen))
+
+ def narrowByShapes(alts: List[TermRef]): List[TermRef] = {
+ if (normArgs exists (_.isInstanceOf[untpd.Function]))
+ if (hasNamedArg(args)) narrowByTrees(alts, args map treeShape, resultType)
+ else narrowByTypes(alts, normArgs map typeShape, resultType)
+ else
+ alts
+ }
+
+ def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] = {
+ val alts2 = alts.filter(alt =>
+ isDirectlyApplicable(alt, targs, args, resultType)
+ )
+ if (alts2.isEmpty && !ctx.isAfterTyper)
+ alts.filter(alt =>
+ isApplicable(alt, targs, args, resultType)
+ )
+ else
+ alts2
+ }
+
+ // Apply progressively more expensive filters, stopping as soon as at
+ // most one alternative remains.
+ val alts1 = narrowBySize(alts)
+ //ctx.log(i"narrowed by size: ${alts1.map(_.symbol.showDcl)}%, %")
+ if (isDetermined(alts1)) alts1
+ else {
+ val alts2 = narrowByShapes(alts1)
+ //ctx.log(i"narrowed by shape: ${alts1.map(_.symbol.showDcl)}%, %")
+ if (isDetermined(alts2)) alts2
+ else {
+ pretypeArgs(alts2, pt)
+ narrowByTrees(alts2, pt.typedArgs, resultType)
+ }
+ }
+
+ case pt @ PolyProto(targs1, pt1) =>
+ assert(targs.isEmpty)
+ val alts1 = alts filter pt.isMatchedBy
+ resolveOverloaded(alts1, pt1, targs1)
+
+ case defn.FunctionOf(args, resultType) =>
+ narrowByTypes(alts, args, resultType)
+
+ case pt =>
+ alts filter (normalizedCompatible(_, pt))
+ }
+ val found = narrowMostSpecific(candidates)
+ if (found.length <= 1) found
+ else {
+ val noDefaults = alts.filter(!_.symbol.hasDefaultParams)
+ if (noDefaults.length == 1) noDefaults // return unique alternative without default parameters if it exists
+ else {
+ // As a last resort, deepen the prototype (forcing lazily ignored
+ // parts) and retry; if that changes nothing, report the ambiguity.
+ val deepPt = pt.deepenProto
+ if (deepPt ne pt) resolveOverloaded(alts, deepPt, targs)
+ else alts
+ }
+ }
+ }
+
+ /** Try to typecheck any arguments in `pt` that are function values missing a
+ * parameter type. The expected type for these arguments is the lub of the
+ * corresponding formal parameter types of all alternatives. Type variables
+ * in formal parameter types are replaced by wildcards. The result of the
+ * typecheck is stored in `pt`, to be retrieved when its `typedArgs` are selected.
+ * The benefit of doing this is to allow idioms like this:
+ *
+ * def map(f: Char => Char): String = ???
+ * def map[U](f: Char => U): Seq[U] = ???
+ * map(x => x.toUpper)
+ *
+ * Without `pretypeArgs` we'd get a "missing parameter type" error for `x`.
+ * With `pretypeArgs`, we use the union of the two formal parameter types
+ * `Char => Char` and `Char => ?` as the expected type of the closure `x => x.toUpper`.
+ * That union is `Char => Char`, so we have an expected parameter type `Char`
+ * for `x`, and the code typechecks.
+ */
+ private def pretypeArgs(alts: List[TermRef], pt: FunProto)(implicit ctx: Context): Unit = {
+ // Walk the actual arguments in parallel with the per-alternative formal
+ // parameter type lists, pre-typing closures with unknown parameter types.
+ def recur(altFormals: List[List[Type]], args: List[untpd.Tree]): Unit = args match {
+ case arg :: args1 if !altFormals.exists(_.isEmpty) =>
+ def isUnknownParamType(t: untpd.Tree) = t match {
+ case ValDef(_, tpt, _) => tpt.isEmpty
+ case _ => false
+ }
+ arg match {
+ case arg: untpd.Function if arg.args.exists(isUnknownParamType) =>
+ def isUniform[T](xs: List[T])(p: (T, T) => Boolean) = xs.forall(p(_, xs.head))
+ val formalsForArg: List[Type] = altFormals.map(_.head)
+ // For alternatives alt_1, ..., alt_n, test whether formal types for current argument are of the form
+ // (p_1_1, ..., p_m_1) => r_1
+ // ...
+ // (p_1_n, ..., p_m_n) => r_n
+ val decomposedFormalsForArg: List[Option[(List[Type], Type)]] =
+ formalsForArg.map(defn.FunctionOf.unapply)
+ if (decomposedFormalsForArg.forall(_.isDefined)) {
+ val formalParamTypessForArg: List[List[Type]] =
+ decomposedFormalsForArg.map(_.get._1)
+ if (isUniform(formalParamTypessForArg)((x, y) => x.length == y.length)) {
+ val commonParamTypes = formalParamTypessForArg.transpose.map(ps =>
+ // Given definitions above, for i = 1,...,m,
+ // ps(i) = List(p_i_1, ..., p_i_n) -- i.e. a column
+ // If all p_i_k's are the same, assume the type as formal parameter
+ // type of the i'th parameter of the closure.
+ if (isUniform(ps)(ctx.typeComparer.isSameTypeWhenFrozen(_, _))) ps.head
+ else WildcardType)
+ val commonFormal = defn.FunctionOf(commonParamTypes, WildcardType)
+ overload.println(i"pretype arg $arg with expected type $commonFormal")
+ pt.typedArg(arg, commonFormal)
+ }
+ }
+ case _ =>
+ }
+ recur(altFormals.map(_.tail), args1)
+ case _ =>
+ }
+ def paramTypes(alt: Type): List[Type] = alt match {
+ case mt: MethodType => mt.paramTypes
+ case mt: PolyType => paramTypes(mt.resultType)
+ case _ => Nil
+ }
+ recur(alts.map(alt => paramTypes(alt.widen)), pt.args)
+ }
+
+ /** Shared implementation of `harmonize` and `harmonizeTypes`: if the types
+ * (as extracted by `tpe`) of all elements in `ts` are numeric value classes
+ * and more than one distinct class occurs, adapt every element (via `adapt`)
+ * to the smallest common numeric supertype; otherwise return `ts` unchanged.
+ */
+ private def harmonizeWith[T <: AnyRef](ts: List[T])(tpe: T => Type, adapt: (T, Type) => T)(implicit ctx: Context): List[T] = {
+ // Collect the numeric value classes of all elements; aborts with an empty
+ // set as soon as a non-numeric element is encountered.
+ def numericClasses(ts: List[T], acc: Set[Symbol]): Set[Symbol] = ts match {
+ case t :: ts1 =>
+ val sym = tpe(t).widen.classSymbol
+ if (sym.isNumericValueClass) numericClasses(ts1, acc + sym)
+ else Set()
+ case Nil =>
+ acc
+ }
+ val clss = numericClasses(ts, Set())
+ if (clss.size > 1) {
+ // `.get` is safe here only if every set of numeric value classes has an
+ // upper bound in ScalaNumericValueTypeList -- TODO confirm this invariant.
+ val lub = defn.ScalaNumericValueTypeList.find(lubTpe =>
+ clss.forall(cls => defn.isValueSubType(cls.typeRef, lubTpe))).get
+ ts.mapConserve(adapt(_, lub))
+ }
+ else ts
+ }
+
+ /** If `trees` all have numeric value types, and they do not have all the same type,
+ * pick a common numeric supertype and convert all trees to this type.
+ */
+ def harmonize(trees: List[Tree])(implicit ctx: Context): List[Tree] = {
+ // CaseDefs are adapted in their body; all other trees directly.
+ def adapt(tree: Tree, pt: Type): Tree = tree match {
+ case cdef: CaseDef => tpd.cpy.CaseDef(cdef)(body = adapt(cdef.body, pt))
+ case _ => adaptInterpolated(tree, pt, tree)
+ }
+ if (ctx.isAfterTyper) trees else harmonizeWith(trees)(_.tpe, adapt)
+ }
+
+ /** If all `tpes` are numeric value types, and they are not all the same type,
+ * pick a common numeric supertype and return a list with every original type
+ * replaced by it; otherwise return `tpes` unchanged.
+ */
+ def harmonizeTypes(tpes: List[Type])(implicit ctx: Context): List[Type] =
+ harmonizeWith(tpes)(identity, (tp, pt) => pt)
+}
+
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
new file mode 100644
index 000000000..dbfc89f6c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -0,0 +1,557 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast._
+import Contexts._
+import Types._
+import Flags._
+import Denotations._
+import Names._
+import StdNames._
+import NameOps._
+import Symbols._
+import Trees._
+import ProtoTypes._
+import Constants._
+import Scopes._
+import CheckRealizable._
+import ErrorReporting.errorTree
+import annotation.unchecked
+import util.Positions._
+import util.{Stats, SimpleMap}
+import util.common._
+import transform.SymUtils._
+import Decorators._
+import Uniques._
+import ErrorReporting.{err, errorType}
+import config.Printers.typr
+import collection.mutable
+import SymDenotations.NoCompleter
+
/** Checks usable both from the typer and from later phases; the stateful
 *  cyclicity checker lives here as well.
 */
object Checking {
  import tpd._

  /** A general checkBounds method that can be used for TypeApply nodes as
   *  well as for AppliedTypeTree nodes. Also checks that type arguments to
   *  *-type parameters are fully applied.
   *
   *  @param args        the type argument trees
   *  @param boundss     the corresponding declared parameter bounds
   *  @param instantiate substitutes the actual arguments for parameter
   *                     references inside a bound
   */
  def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context): Unit = {
    // An argument to a *-kinded parameter may not itself be higher-kinded.
    (args, boundss).zipped.foreach { (arg, bound) =>
      if (!bound.isHK && arg.tpe.isHK)
        ctx.error(ex"missing type parameter(s) for $arg", arg.pos)
    }
    for ((arg, which, bound) <- ctx.boundsViolations(args, boundss, instantiate))
      ctx.error(
        ex"Type argument ${arg.tpe} does not conform to $which bound $bound ${err.whyNoMatchStr(arg.tpe, bound)}",
        arg.pos.focus)
  }

  /** Check that type arguments `args` conform to corresponding bounds in `poly`.
   *  Note: This does not check the bounds of AppliedTypeTrees. These
   *  are handled by method checkBounds in FirstTransform
   */
  def checkBounds(args: List[tpd.Tree], poly: PolyType)(implicit ctx: Context): Unit =
    checkBounds(args, poly.paramBounds, _.substParams(poly, _))

  /** Check applied type trees for well-formedness. This means
   *  - all arguments are within their corresponding bounds
   *  - if type is a higher-kinded application with wildcard arguments,
   *    check that it or one of its supertypes can be reduced to a normal application.
   *    Unreducible applications correspond to general existentials, and we
   *    cannot handle those.
   */
  def checkAppliedType(tree: AppliedTypeTree)(implicit ctx: Context) = {
    val AppliedTypeTree(tycon, args) = tree
    // If `args` is a list of named arguments, return corresponding type parameters,
    // otherwise return type parameters unchanged
    val tparams = tycon.tpe.typeParams
    // The argument standing for `tparam`: the named argument with the parameter's
    // name if present, otherwise a tree denoting the parameter itself.
    def argNamed(tparam: TypeParamInfo) = args.find {
      case NamedArg(name, _) => name == tparam.paramName
      case _ => false
    }.getOrElse(TypeTree(tparam.paramRef))
    val orderedArgs = if (hasNamedArg(args)) tparams.map(argNamed) else args
    val bounds = tparams.map(_.paramBoundsAsSeenFrom(tycon.tpe))
    def instantiate(bound: Type, args: List[Type]) =
      bound.LambdaAbstract(tparams).appliedTo(args)
    checkBounds(orderedArgs, bounds, instantiate)

    // Reject applications of higher-kinded types to wildcard arguments that
    // cannot be reduced away; walk up supertypes looking for a reducible form.
    def checkWildcardHKApply(tp: Type, pos: Position): Unit = tp match {
      case tp @ HKApply(tycon, args) if args.exists(_.isInstanceOf[TypeBounds]) =>
        tycon match {
          case tycon: PolyType =>
            ctx.errorOrMigrationWarning(
              ex"unreducible application of higher-kinded type $tycon to wildcard arguments",
              pos)
          case _ =>
            checkWildcardHKApply(tp.superType, pos)
        }
      case _ =>
    }
    def checkValidIfHKApply(implicit ctx: Context): Unit =
      checkWildcardHKApply(tycon.tpe.appliedTo(args.map(_.tpe)), tree.pos)
    // Run the wildcard check in a context that permits lambda-wildcard applies.
    checkValidIfHKApply(ctx.addMode(Mode.AllowLambdaWildcardApply))
  }

  /** Check that `tp` refers to a nonAbstract class
   *  and that the instance conforms to the self type of the created class.
   */
  def checkInstantiable(tp: Type, pos: Position)(implicit ctx: Context): Unit =
    tp.underlyingClassRef(refinementOK = false) match {
      case tref: TypeRef =>
        val cls = tref.symbol
        if (cls.is(AbstractOrTrait))
          ctx.error(em"$cls is abstract; cannot be instantiated", pos)
        if (!cls.is(Module)) {
          // Create a synthetic singleton type instance, and check whether
          // it conforms to the self type of the class as seen from that instance.
          val stp = SkolemType(tp)
          val selfType = tref.givenSelfType.asSeenFrom(stp, cls)
          if (selfType.exists && !(stp <:< selfType))
            ctx.error(ex"$tp does not conform to its self type $selfType; cannot be instantiated")
        }
      case _ =>
    }

  /** Check that type `tp` is realizable; if not, report an error
   *  (demoted to a migration warning in Scala-2 compatibility mode).
   */
  def checkRealizable(tp: Type, pos: Position)(implicit ctx: Context): Unit = {
    val rstatus = realizability(tp)
    if (rstatus ne Realizable) {
      def msg = em"$tp is not a legal path\n since it${rstatus.msg}"
      if (ctx.scala2Mode) ctx.migrationWarning(msg, pos) else ctx.error(msg, pos)
    }
  }

  /** A type map which checks that the only cycles in a type are F-bounds
   *  and that protects all F-bounded references by LazyRefs.
   */
  class CheckNonCyclicMap(sym: Symbol, reportErrors: Boolean)(implicit ctx: Context) extends TypeMap {

    /** Are cycles allowed within nested refinedInfos of currently checked type? */
    private var nestedCycleOK = false

    /** Are cycles allowed within currently checked type? */
    private var cycleOK = false

    /** A diagnostic output string that indicates the position of the last
     *  part of a type bounds checked by checkInfo. Possible choices:
     *  alias, lower bound, upper bound.
     */
    var where: String = ""

    /** The last type top-level type checked when a CyclicReference occurs. */
    var lastChecked: Type = NoType

    /** Check info `tp` for cycles. Throw CyclicReference for illegal cycles,
     *  break direct cycle with a LazyRef for legal, F-bounded cycles.
     */
    def checkInfo(tp: Type): Type = tp match {
      case tp @ TypeAlias(alias) =>
        try tp.derivedTypeAlias(apply(alias))
        finally {
          // Record diagnostic state even when apply throws, so the caller's
          // error message can say which part was being checked.
          where = "alias"
          lastChecked = alias
        }
      case tp @ TypeBounds(lo, hi) =>
        val lo1 = try apply(lo) finally {
          where = "lower bound"
          lastChecked = lo
        }
        val saved = nestedCycleOK
        nestedCycleOK = true // F-bounded cycles are tolerated in upper bounds
        try tp.derivedTypeBounds(lo1, apply(hi))
        finally {
          nestedCycleOK = saved
          where = "upper bound"
          lastChecked = hi
        }
      case _ =>
        tp
    }

    /** Apply the map with `cycleOK`/`nestedCycleOK` temporarily set to the
     *  given values, restoring the previous values afterwards.
     */
    private def apply(tp: Type, cycleOK: Boolean, nestedCycleOK: Boolean): Type = {
      val savedCycleOK = this.cycleOK
      val savedNestedCycleOK = this.nestedCycleOK
      this.cycleOK = cycleOK
      this.nestedCycleOK = nestedCycleOK
      try apply(tp)
      finally {
        this.cycleOK = savedCycleOK
        this.nestedCycleOK = savedNestedCycleOK
      }
    }

    def apply(tp: Type): Type = tp match {
      case tp: TermRef =>
        this(tp.info)
        mapOver(tp)
      case tp @ RefinedType(parent, name, rinfo) =>
        tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK))
      case tp: RecType =>
        tp.rebind(this(tp.parent))
      case tp @ HKApply(tycon, args) =>
        tp.derivedAppliedType(this(tycon), args.map(this(_, nestedCycleOK, nestedCycleOK)))
      case tp @ TypeRef(pre, name) =>
        try {
          // A prefix is interesting if it might contain (transitively) a reference
          // to symbol `sym` itself. We only check references with interesting
          // prefixes for cycles. This pruning is done in order not to force
          // global symbols when doing the cyclicity check.
          def isInteresting(prefix: Type): Boolean = prefix.stripTypeVar match {
            case NoPrefix => true
            case prefix: ThisType => sym.owner.isClass && prefix.cls.isContainedIn(sym.owner)
            case prefix: NamedType => !prefix.symbol.isStaticOwner && isInteresting(prefix.prefix)
            case SuperType(thistp, _) => isInteresting(thistp)
            case AndType(tp1, tp2) => isInteresting(tp1) || isInteresting(tp2)
            case OrType(tp1, tp2) => isInteresting(tp1) && isInteresting(tp2)
            case _: RefinedOrRecType | _: HKApply => true
            case _ => false
          }
          if (isInteresting(pre)) {
            // Cycles inside a prefix are never legal, so check with both flags off.
            val pre1 = this(pre, false, false)
            checkInfo(tp.info)
            if (pre1 eq pre) tp else tp.newLikeThis(pre1)
          }
          else tp
        } catch {
          case ex: CyclicReference =>
            ctx.debuglog(i"cycle detected for $tp, $nestedCycleOK, $cycleOK")
            if (cycleOK) LazyRef(() => tp) // break a legal (F-bounded) cycle lazily
            else if (reportErrors) throw ex // caller (checkNonCyclic) reports it
            else tp
        }
      case _ => mapOver(tp)
    }
  }

  /** Check that `info` of symbol `sym` is not cyclic.
   *  @pre sym is not yet initialized (i.e. its type is a Completer).
   *  @return `info` where every legal F-bounded reference is protected
   *                 by a `LazyRef`, or `ErrorType` if a cycle was detected and reported.
   */
  def checkNonCyclic(sym: Symbol, info: Type, reportErrors: Boolean)(implicit ctx: Context): Type = {
    val checker = new CheckNonCyclicMap(sym, reportErrors)(ctx.addMode(Mode.CheckCyclic))
    try checker.checkInfo(info)
    catch {
      case ex: CyclicReference =>
        if (reportErrors) {
          // The checker's `where`/`lastChecked` state pinpoints the offending part.
          ctx.error(i"illegal cyclic reference: ${checker.where} ${checker.lastChecked} of $sym refers back to the type itself", sym.pos)
          ErrorType
        }
        else info
    }
  }

  /** Check that refinement satisfies the following two conditions
   *  1. No part of it refers to a symbol that's defined in the same refinement
   *     at a textually later point.
   *  2. All references to the refinement itself via `this` are followed by
   *     selections.
   *  Note: It's not yet clear what exactly we want to allow and what we want to rule out.
   *  This depends also on firming up the DOT calculus. For the moment we only issue
   *  deprecated warnings, not errors.
   */
  def checkRefinementNonCyclic(refinement: Tree, refineCls: ClassSymbol, seen: mutable.Set[Symbol])
    (implicit ctx: Context): Unit = {
    def flag(what: String, tree: Tree) =
      ctx.deprecationWarning(i"$what reference in refinement is deprecated", tree.pos)
    def forwardRef(tree: Tree) = flag("forward", tree)
    def selfRef(tree: Tree) = flag("self", tree)
    val checkTree = new TreeAccumulator[Unit] {
      // A reference is a forward reference if it targets a member of the
      // refinement class that has not been traversed (seen) yet.
      def checkRef(tree: Tree, sym: Symbol) =
        if (sym.maybeOwner == refineCls && !seen(sym)) forwardRef(tree)
      def apply(x: Unit, tree: Tree)(implicit ctx: Context) = tree match {
        case tree: MemberDef =>
          // Traverse the body first, then mark the member itself as seen.
          foldOver(x, tree)
          seen += tree.symbol
        case tree @ Select(This(_), _) =>
          checkRef(tree, tree.symbol)
        case tree: RefTree =>
          checkRef(tree, tree.symbol)
          foldOver(x, tree)
        case tree: This =>
          selfRef(tree)
        case tree: TypeTree =>
          // Inferred types have no tree structure; inspect the type instead.
          val checkType = new TypeAccumulator[Unit] {
            def apply(x: Unit, tp: Type): Unit = tp match {
              case tp: NamedType =>
                checkRef(tree, tp.symbol)
                tp.prefix match {
                  case pre: ThisType =>
                  case pre => foldOver(x, pre)
                }
              case tp: ThisType if tp.cls == refineCls =>
                selfRef(tree)
              case _ =>
                foldOver(x, tp)
            }
          }
          checkType((), tree.tpe)
        case _ =>
          foldOver(x, tree)
      }
    }
    checkTree((), refinement)
  }

  /** Check that symbol's definition is well-formed, i.e. that its flag
   *  combination is legal for the kind of definition it is.
   */
  def checkWellFormed(sym: Symbol)(implicit ctx: Context): Unit = {
    //println(i"check wf $sym with flags ${sym.flags}")
    def fail(msg: String) = ctx.error(msg, sym.pos)
    def varNote =
      if (sym.is(Mutable)) "\n(Note that variables need to be initialized to be defined)"
      else ""

    def checkWithDeferred(flag: FlagSet) =
      if (sym.is(flag))
        fail(i"abstract member may not have `$flag' modifier")
    def checkNoConflict(flag1: FlagSet, flag2: FlagSet) =
      if (sym.is(allOf(flag1, flag2)))
        fail(i"illegal combination of modifiers: $flag1 and $flag2 for: $sym")

    if (sym.is(ImplicitCommon)) {
      if (sym.owner.is(Package))
        fail(i"`implicit' modifier cannot be used for top-level definitions")
      if (sym.isType)
        fail(i"`implicit' modifier cannot be used for types or traits")
    }
    if (!sym.isClass && sym.is(Abstract))
      fail(i"`abstract' modifier can be used only for classes; it should be omitted for abstract members")
    if (sym.is(AbsOverride) && !sym.owner.is(Trait))
      fail(i"`abstract override' modifier only allowed for members of traits")
    if (sym.is(Trait) && sym.is(Final))
      fail(i"$sym may not be `final'")
    if (sym.hasAnnotation(defn.NativeAnnot)) {
      if (!sym.is(Deferred))
        fail(i"`@native' members may not have implementation")
    }
    else if (sym.is(Deferred, butNot = Param) && !sym.isSelfSym) {
      if (!sym.owner.isClass || sym.owner.is(Module) || sym.owner.isAnonymousClass)
        fail(i"only classes can have declared but undefined members$varNote")
      checkWithDeferred(Private)
      checkWithDeferred(Final)
      checkWithDeferred(Inline)
    }
    if (sym.isValueClass && sym.is(Trait) && !sym.isRefinementClass)
      fail(i"$sym cannot extend AnyVal")
    checkNoConflict(Final, Sealed)
    checkNoConflict(Private, Protected)
    checkNoConflict(Abstract, Override)
  }

  /** Check the type signature of the symbol `M` defined by `tree` does not refer
   *  to a private type or value which is invisible at a point where `M` is still
   *  visible. As an exception, we allow references to type aliases if the underlying
   *  type of the alias is not a leak. So type aliases are transparent as far as
   *  leak testing is concerned.
   *  @return The `info` of `sym`, with problematic aliases expanded away.
   *  See i997.scala for tests, i1130.scala for a case where it matters that we
   *  transform leaky aliases away.
   */
  def checkNoPrivateLeaks(sym: Symbol, pos: Position)(implicit ctx: Context): Type = {
    class NotPrivate extends TypeMap {
      type Errors = List[(String, Position)]
      // Leak errors are accumulated, not reported immediately, so that the
      // alias-dealiasing fallback below can retract them.
      var errors: Errors = Nil
      /** The widest scope from which `sym` is still accessible. */
      def accessBoundary(sym: Symbol): Symbol =
        if (sym.is(Private)) sym.owner
        else if (sym.privateWithin.exists) sym.privateWithin
        else if (sym.is(Package)) sym
        else accessBoundary(sym.owner)
      def apply(tp: Type): Type = tp match {
        case tp: NamedType =>
          val prevErrors = errors
          var tp1 =
            if (tp.symbol.is(Private) &&
                !accessBoundary(sym).isContainedIn(tp.symbol.owner)) {
              errors = (em"non-private $sym refers to private ${tp.symbol}\n in its type signature ${sym.info}",
                sym.pos) :: errors
              tp
            }
            else mapOver(tp)
          if ((errors ne prevErrors) && tp.info.isAlias) {
            // try to dealias to avoid a leak error
            val savedErrors = errors
            errors = prevErrors
            val tp2 = apply(tp.superType)
            if (errors eq prevErrors) tp1 = tp2
            else errors = savedErrors
          }
          tp1
        case tp: ClassInfo =>
          tp.derivedClassInfo(
            prefix = apply(tp.prefix),
            classParents = tp.parentsWithArgs.map(p =>
              apply(p).underlyingClassRef(refinementOK = false).asInstanceOf[TypeRef]))
        case _ =>
          mapOver(tp)
      }
    }
    val notPrivate = new NotPrivate
    val info = notPrivate(sym.info)
    notPrivate.errors.foreach { case (msg, pos) => ctx.errorOrMigrationWarning(msg, pos) }
    info
  }
}
+
/** Checks performed by the typer while typing trees. Mixed into `Typer`;
 *  [[NoChecking]] overrides everything here with no-ops.
 */
trait Checking {

  import tpd._

  /** Check that `info` of `sym` is not cyclic; see `Checking.checkNonCyclic`. */
  def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(implicit ctx: Context): Type =
    Checking.checkNonCyclic(sym, info, reportErrors)

  /** Check that Java statics and packages can only be used in selections.
   */
  def checkValue(tree: Tree, proto: Type)(implicit ctx: Context): tree.type = {
    if (!proto.isInstanceOf[SelectionProto]) {
      val sym = tree.tpe.termSymbol
      // The check is avoided inside Java compilation units because it always fails
      // on the singleton type Module.type.
      if ((sym is Package) || ((sym is JavaModule) && !ctx.compilationUnit.isJava)) ctx.error(em"$sym is not a value", tree.pos)
    }
    tree
  }

  /** Check that type `tp` is stable. */
  def checkStable(tp: Type, pos: Position)(implicit ctx: Context): Unit =
    if (!tp.isStable) ctx.error(ex"$tp is not stable", pos)

  /** Check that all type members of `tp` have realizable bounds */
  def checkRealizableBounds(tp: Type, pos: Position)(implicit ctx: Context): Unit = {
    val rstatus = boundsRealizability(tp)
    if (rstatus ne Realizable)
      ctx.error(ex"$tp cannot be instantiated since it${rstatus.msg}", pos)
  }

  /** Check that `tp` is a class type.
   *  Also, if `traitReq` is true, check that `tp` is a trait.
   *  Also, if `stablePrefixReq` is true and phase is not after RefChecks,
   *  check that class prefix is stable.
   *  @return `tp` itself if it is a class or trait ref, ObjectType if not.
   */
  def checkClassType(tp: Type, pos: Position, traitReq: Boolean, stablePrefixReq: Boolean)(implicit ctx: Context): Type =
    tp.underlyingClassRef(refinementOK = false) match {
      case tref: TypeRef =>
        if (traitReq && !(tref.symbol is Trait)) ctx.error(ex"$tref is not a trait", pos)
        if (stablePrefixReq && ctx.phase <= ctx.refchecksPhase) checkStable(tref.prefix, pos)
        tp
      case _ =>
        ctx.error(ex"$tp is not a class type", pos)
        defn.ObjectType
    }

  /** Check that a non-implicit parameter making up the first parameter section of an
   *  implicit conversion is not a singleton type.
   */
  def checkImplicitParamsNotSingletons(vparamss: List[List[ValDef]])(implicit ctx: Context): Unit = vparamss match {
    case (vparam :: Nil) :: _ if !(vparam.symbol is Implicit) =>
      if (vparam.tpt.tpe.isInstanceOf[SingletonType])
        ctx.error(s"implicit conversion may not have a parameter of singleton type", vparam.tpt.pos)
    case _ =>
  }

  /** Check that any top-level type arguments in this type are feasible, i.e. that
   *  their lower bound conforms to their upper bound. If a type argument is
   *  infeasible, issue an error and continue with upper bound.
   */
  def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp match {
    case tp: RefinedType =>
      tp.derivedRefinedType(tp.parent, tp.refinedName, checkFeasible(tp.refinedInfo, pos, where))
    case tp: RecType =>
      tp.rebind(tp.parent)
    case tp @ TypeBounds(lo, hi) if !(lo <:< hi) =>
      ctx.error(ex"no type exists between low bound $lo and high bound $hi$where", pos)
      TypeAlias(hi)
    case _ =>
      tp
  }

  /** Check that `tree` is a pure expression of constant type */
  def checkInlineConformant(tree: Tree, what: => String)(implicit ctx: Context): Unit =
    tree.tpe match {
      case tp: TermRef if tp.symbol.is(InlineParam) => // ok
      case tp => tp.widenTermRefExpr match {
        case tp: ConstantType if isPureExpr(tree) => // ok
        case tp if defn.isFunctionType(tp) && isPureExpr(tree) => // ok
        case _ => ctx.error(em"$what must be a constant expression or a function", tree.pos)
      }
    }

  /** Check that class does not define same symbol twice */
  def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = {
    // Maps each name to the declarations with that name checked so far.
    val seen = new mutable.HashMap[Name, List[Symbol]] {
      override def default(key: Name) = Nil
    }
    typr.println(i"check no double defs $cls")

    def checkDecl(decl: Symbol): Unit = {
      for (other <- seen(decl.name)) {
        typr.println(i"conflict? $decl $other")
        if (decl.matches(other)) {
          def doubleDefError(decl: Symbol, other: Symbol): Unit = {
            def ofType = if (decl.isType) "" else em": ${other.info}"
            def explanation =
              if (!decl.isRealMethod) ""
              else "\n (the definitions have matching type signatures)"
            ctx.error(em"$decl is already defined as $other$ofType$explanation", decl.pos)
          }
          // Blame the non-synthetic definition, so the error points at user code.
          if (decl is Synthetic) doubleDefError(other, decl)
          else doubleDefError(decl, other)
        }
        if ((decl is HasDefaultParams) && (other is HasDefaultParams)) {
          // Fix: report at the declaration's position; previously this error was
          // emitted without a position, unlike every other diagnostic here.
          ctx.error(em"two or more overloaded variants of $decl have default arguments", decl.pos)
          decl resetFlag HasDefaultParams
        }
      }
      seen(decl.name) = decl :: seen(decl.name)
    }

    cls.info.decls.foreach(checkDecl)
    cls.info match {
      case ClassInfo(_, _, _, _, selfSym: Symbol) => checkDecl(selfSym)
      case _ =>
    }
  }

  /** Check that a class constructor call is legal from `caller`:
   *  traits may not call constructors, and a trait constructor may
   *  not be called twice along the linearization.
   */
  def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) =
    if (!ctx.isAfterTyper) {
      val called = call.tpe.classSymbol
      if (caller is Trait)
        ctx.error(i"$caller may not call constructor of $called", call.pos)
      else if (called.is(Trait) && !caller.mixins.contains(called))
        ctx.error(i"""$called is already implemented by super${caller.superClass},
                     |its constructor cannot be called again""", call.pos)
    }

  /** Check that `tpt` does not define a higher-kinded type */
  def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree =
    if (tpt.tpe.isHK && !ctx.compilationUnit.isJava) {
      // be more lenient with missing type params in Java,
      // needed to make pos/java-interop/t1196 work.
      errorTree(tpt, ex"missing type parameter for ${tpt.tpe}")
    }
    else tpt

  /** Check that `tpt` does not refer to a singleton type */
  def checkNotSingleton(tpt: Tree, where: String)(implicit ctx: Context): Tree =
    if (tpt.tpe.isInstanceOf[SingletonType]) {
      errorTree(tpt, ex"Singleton type ${tpt.tpe} is not allowed $where")
    }
    else tpt
}
+
/** A variant of [[Checking]] in which every check is a no-op: each override
 *  simply returns its argument unchanged (or unit), so no diagnostics are
 *  ever issued.
 */
trait NoChecking extends Checking {
  import tpd._
  override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(implicit ctx: Context): Type = info
  override def checkValue(tree: Tree, proto: Type)(implicit ctx: Context): tree.type = tree
  override def checkStable(tp: Type, pos: Position)(implicit ctx: Context): Unit = ()
  override def checkClassType(tp: Type, pos: Position, traitReq: Boolean, stablePrefixReq: Boolean)(implicit ctx: Context): Type = tp
  override def checkImplicitParamsNotSingletons(vparamss: List[List[ValDef]])(implicit ctx: Context): Unit = ()
  override def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp
  override def checkInlineConformant(tree: Tree, what: => String)(implicit ctx: Context) = ()
  override def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = ()
  override def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) = ()
  override def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = tpt
  override def checkNotSingleton(tpt: Tree, where: String)(implicit ctx: Context): Tree = tpt
}
diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
new file mode 100644
index 000000000..68a5d05f5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
@@ -0,0 +1,182 @@
+package dotty.tools.dotc
+package typer
+
+import java.lang.ArithmeticException
+
+import ast._
+import Trees._
+import core._
+import Types._
+import Constants._
+import Names._
+import StdNames._
+import Contexts._
+
/** Constant folding: replaces pure unary and binary operations over literal
 *  constants by their result, annotated on the tree as a `ConstantType`.
 */
object ConstFold {

  import tpd._

  /** If tree is a constant operation, replace with result. */
  def apply(tree: Tree)(implicit ctx: Context): Tree = finish(tree) {
    tree match {
      case Apply(Select(receiver, op), arg :: Nil) =>
        (receiver.tpe.widenTermRefExpr, arg.tpe.widenTermRefExpr) match {
          case (ConstantType(x), ConstantType(y)) => foldBinop(op, x, y)
          case _ => null
        }
      case Select(receiver, op) =>
        receiver.tpe.widenTermRefExpr match {
          case ConstantType(x) => foldUnop(op, x)
          case _ => null
        }
      case _ => null
    }
  }

  /** If tree is a constant value that can be converted to type `pt`, perform
   *  the conversion.
   */
  def apply(tree: Tree, pt: Type)(implicit ctx: Context): Tree =
    finish(apply(tree)) {
      tree.tpe.widenTermRefExpr match {
        case ConstantType(x) => x.convertTo(pt)
        case _ => null
      }
    }

  /** Annotate `tree` with the constant produced by `compute`, if any.
   *  A `null` result or an ArithmeticException leaves the tree unchanged;
   *  in the latter case the program will fail at runtime instead, which is
   *  preferable to crashing the compiler.
   */
  private def finish(tree: Tree)(compute: => Constant)(implicit ctx: Context): Tree =
    try {
      val folded = compute
      if (folded eq null) tree else tree.withType(ConstantType(folded))
    }
    catch {
      case _: ArithmeticException => tree
    }

  /** Fold a unary operation `op` applied to constant `a`, or `null` if the
   *  operator/operand combination is not foldable.
   */
  private def foldUnop(op: Name, a: Constant): Constant = op match {
    case nme.UNARY_! if a.tag == BooleanTag => Constant(!a.booleanValue)
    case nme.UNARY_~ =>
      a.tag match {
        case IntTag  => Constant(~a.intValue)
        case LongTag => Constant(~a.longValue)
        case _       => null
      }
    case nme.UNARY_+ =>
      a.tag match {
        case IntTag    => Constant(a.intValue)
        case LongTag   => Constant(a.longValue)
        case FloatTag  => Constant(a.floatValue)
        case DoubleTag => Constant(a.doubleValue)
        case _         => null
      }
    case nme.UNARY_- =>
      a.tag match {
        case IntTag    => Constant(-a.intValue)
        case LongTag   => Constant(-a.longValue)
        case FloatTag  => Constant(-a.floatValue)
        case DoubleTag => Constant(-a.doubleValue)
        case _         => null
      }
    case _ => null
  }

  /** These are local helpers to keep foldBinop from overly taxing the
   *  optimizer. On folded constants the lazy boolean ops behave like
   *  their strict counterparts, so ZOR/OR and ZAND/AND share cases.
   */
  private def foldBooleanOp(op: Name, a: Constant, b: Constant): Constant = op match {
    case nme.ZOR | nme.OR   => Constant(a.booleanValue | b.booleanValue)
    case nme.XOR            => Constant(a.booleanValue ^ b.booleanValue)
    case nme.ZAND | nme.AND => Constant(a.booleanValue & b.booleanValue)
    case nme.EQ             => Constant(a.booleanValue == b.booleanValue)
    case nme.NE             => Constant(a.booleanValue != b.booleanValue)
    case _                  => null
  }
  /** Fold for Byte/Short/Char/Int operands (all computed in Int). */
  private def foldSubrangeOp(op: Name, a: Constant, b: Constant): Constant = op match {
    case nme.OR  => Constant(a.intValue | b.intValue)
    case nme.XOR => Constant(a.intValue ^ b.intValue)
    case nme.AND => Constant(a.intValue & b.intValue)
    case nme.LSL => Constant(a.intValue << b.intValue)
    case nme.LSR => Constant(a.intValue >>> b.intValue)
    case nme.ASR => Constant(a.intValue >> b.intValue)
    case nme.EQ  => Constant(a.intValue == b.intValue)
    case nme.NE  => Constant(a.intValue != b.intValue)
    case nme.LT  => Constant(a.intValue < b.intValue)
    case nme.GT  => Constant(a.intValue > b.intValue)
    case nme.LE  => Constant(a.intValue <= b.intValue)
    case nme.GE  => Constant(a.intValue >= b.intValue)
    case nme.ADD => Constant(a.intValue + b.intValue)
    case nme.SUB => Constant(a.intValue - b.intValue)
    case nme.MUL => Constant(a.intValue * b.intValue)
    case nme.DIV => Constant(a.intValue / b.intValue)
    case nme.MOD => Constant(a.intValue % b.intValue)
    case _       => null
  }
  /** Fold for Long operands. */
  private def foldLongOp(op: Name, a: Constant, b: Constant): Constant = op match {
    case nme.OR  => Constant(a.longValue | b.longValue)
    case nme.XOR => Constant(a.longValue ^ b.longValue)
    case nme.AND => Constant(a.longValue & b.longValue)
    case nme.LSL => Constant(a.longValue << b.longValue)
    case nme.LSR => Constant(a.longValue >>> b.longValue)
    case nme.ASR => Constant(a.longValue >> b.longValue)
    case nme.EQ  => Constant(a.longValue == b.longValue)
    case nme.NE  => Constant(a.longValue != b.longValue)
    case nme.LT  => Constant(a.longValue < b.longValue)
    case nme.GT  => Constant(a.longValue > b.longValue)
    case nme.LE  => Constant(a.longValue <= b.longValue)
    case nme.GE  => Constant(a.longValue >= b.longValue)
    case nme.ADD => Constant(a.longValue + b.longValue)
    case nme.SUB => Constant(a.longValue - b.longValue)
    case nme.MUL => Constant(a.longValue * b.longValue)
    case nme.DIV => Constant(a.longValue / b.longValue)
    case nme.MOD => Constant(a.longValue % b.longValue)
    case _       => null
  }
  /** Fold for Float operands (comparisons and arithmetic only). */
  private def foldFloatOp(op: Name, a: Constant, b: Constant): Constant = op match {
    case nme.EQ  => Constant(a.floatValue == b.floatValue)
    case nme.NE  => Constant(a.floatValue != b.floatValue)
    case nme.LT  => Constant(a.floatValue < b.floatValue)
    case nme.GT  => Constant(a.floatValue > b.floatValue)
    case nme.LE  => Constant(a.floatValue <= b.floatValue)
    case nme.GE  => Constant(a.floatValue >= b.floatValue)
    case nme.ADD => Constant(a.floatValue + b.floatValue)
    case nme.SUB => Constant(a.floatValue - b.floatValue)
    case nme.MUL => Constant(a.floatValue * b.floatValue)
    case nme.DIV => Constant(a.floatValue / b.floatValue)
    case nme.MOD => Constant(a.floatValue % b.floatValue)
    case _       => null
  }
  /** Fold for Double operands (comparisons and arithmetic only). */
  private def foldDoubleOp(op: Name, a: Constant, b: Constant): Constant = op match {
    case nme.EQ  => Constant(a.doubleValue == b.doubleValue)
    case nme.NE  => Constant(a.doubleValue != b.doubleValue)
    case nme.LT  => Constant(a.doubleValue < b.doubleValue)
    case nme.GT  => Constant(a.doubleValue > b.doubleValue)
    case nme.LE  => Constant(a.doubleValue <= b.doubleValue)
    case nme.GE  => Constant(a.doubleValue >= b.doubleValue)
    case nme.ADD => Constant(a.doubleValue + b.doubleValue)
    case nme.SUB => Constant(a.doubleValue - b.doubleValue)
    case nme.MUL => Constant(a.doubleValue * b.doubleValue)
    case nme.DIV => Constant(a.doubleValue / b.doubleValue)
    case nme.MOD => Constant(a.doubleValue % b.doubleValue)
    case _       => null
  }

  /** Fold binary operation `op` over constants `a` and `b`, dispatching on the
   *  wider of the two numeric tags; `null` if not foldable. Division by zero
   *  and similar arithmetic failures yield `null` (fold is abandoned).
   */
  private def foldBinop(op: Name, a: Constant, b: Constant): Constant = {
    val optag =
      if (a.tag == b.tag) a.tag
      else if (a.isNumeric && b.isNumeric) a.tag max b.tag
      else NoTag

    try {
      optag match {
        case BooleanTag                            => foldBooleanOp(op, a, b)
        case ByteTag | ShortTag | CharTag | IntTag => foldSubrangeOp(op, a, b)
        case LongTag                               => foldLongOp(op, a, b)
        case FloatTag                              => foldFloatOp(op, a, b)
        case DoubleTag                             => foldDoubleOp(op, a, b)
        case StringTag if op == nme.ADD            => Constant(a.stringValue + b.stringValue)
        case _                                     => null
      }
    }
    catch {
      case ex: ArithmeticException => null
    }
  }
}
diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala
new file mode 100644
index 000000000..370844e65
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala
@@ -0,0 +1,56 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import Contexts._, Symbols._, Decorators._, Comments._
+import util.Positions._
+import ast.tpd
+
trait Docstrings { self: Typer =>

  /** The Docstrings typer will handle the expansion of `@define` and
   *  `@inheritdoc` if there is a `DocContext` present as a property in the
   *  supplied `ctx`.
   *
   *  It will also type any `@usecase` available in function definitions.
   *  A no-op when no doc context is present (i.e. outside `-doc` runs).
   */
  def cookComments(syms: List[Symbol], owner: Symbol)(implicit ctx: Context): Unit =
    ctx.docCtx.foreach { docbase =>
      // Only symbols that actually carry a docstring need processing.
      val relevantSyms = syms.filter(docbase.docstring(_).isDefined)
      relevantSyms.foreach { sym =>
        expandParentDocs(sym)
        val usecases = docbase.docstring(sym).map(_.usecases).getOrElse(Nil)

        usecases.foreach { usecase =>
          // The usecase's untyped code is entered and typed as a member of `owner`;
          // the typed DefDef is stored back on the usecase for later retrieval.
          enterSymbol(createSymbol(usecase.untpdCode))

          typedStats(usecase.untpdCode :: Nil, owner) match {
            case List(df: tpd.DefDef) => usecase.tpdCode = df
            case _ => ctx.error("`@usecase` was not a valid definition", usecase.codePos)
          }
        }
      }
    }

  /** Expand the docstring of `sym`, walking up the owner chain first —
   *  presumably so that `@define` variables declared on owners are in scope
   *  when `sym`'s comment is expanded (TODO confirm against the template
   *  expander). Already-expanded comments are left untouched.
   */
  private def expandParentDocs(sym: Symbol)(implicit ctx: Context): Unit =
    ctx.docCtx.foreach { docCtx =>
      docCtx.docstring(sym).foreach { cmt =>
        def expandDoc(owner: Symbol): Unit = if (!cmt.isExpanded) {
          val tplExp = docCtx.templateExpander
          tplExp.defineVariables(sym)

          // Expanded comment replaces the raw one in the doc context.
          val newCmt = cmt
            .expand(tplExp.expandedDocComment(sym, owner, _))
            .withUsecases

          docCtx.addDocstring(sym, Some(newCmt))
        }

        if (sym ne NoSymbol) {
          // Recurse on owners before expanding this symbol's comment.
          expandParentDocs(sym.owner)
          expandDoc(sym.owner)
        }
      }
    }
}
diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
new file mode 100644
index 000000000..b5ace87d3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
@@ -0,0 +1,104 @@
+package dotty.tools
+package dotc
+package typer
+
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.ast.untpd
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Names.Name
+import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Decorators._
+
+object Dynamic {
+ def isDynamicMethod(name: Name): Boolean =
+ name == nme.applyDynamic || name == nme.selectDynamic || name == nme.updateDynamic || name == nme.applyDynamicNamed
+}
+
+/** Translates selection that does not typecheck according to the scala.Dynamic rules:
+ * foo.bar(baz) = quux ~~> foo.selectDynamic(bar).update(baz, quux)
+ * foo.bar = baz ~~> foo.updateDynamic("bar")(baz)
+ * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
+ * foo.bar ~~> foo.selectDynamic(bar)
+ *
+ *  The first matching rule is applied.
+ */
+trait Dynamic { self: Typer with Applications =>
+ import Dynamic._
+ import tpd._
+
+ /** Translate selection that does not typecheck according to the normal rules into a applyDynamic/applyDynamicNamed.
+ * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
+ * foo.bar[T0, ...](baz0, baz1, ...) ~~> foo.applyDynamic[T0, ...](bar)(baz0, baz1, ...)
+ * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ * foo.bar[T0, ...](x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed[T0, ...]("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ */
+ def typedDynamicApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
+ def typedDynamicApply(qual: untpd.Tree, name: Name, targs: List[untpd.Tree]): Tree = {
+ def isNamedArg(arg: untpd.Tree): Boolean = arg match { case NamedArg(_, _) => true; case _ => false }
+ val args = tree.args
+ val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic
+ if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) {
+ ctx.error("applyDynamicNamed does not support passing a vararg parameter", tree.pos)
+ tree.withType(ErrorType)
+ } else {
+ def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg))
+ def namedArgs = args.map {
+ case NamedArg(argName, arg) => namedArgTuple(argName.toString, arg)
+ case arg => namedArgTuple("", arg)
+ }
+ val args1 = if (dynName == nme.applyDynamic) args else namedArgs
+ typedApply(untpd.Apply(coreDynamic(qual, dynName, name, targs), args1), pt)
+ }
+ }
+
+ tree.fun match {
+ case Select(qual, name) if !isDynamicMethod(name) =>
+ typedDynamicApply(qual, name, Nil)
+ case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
+ typedDynamicApply(qual, name, targs)
+ case TypeApply(fun, targs) =>
+ typedDynamicApply(fun, nme.apply, targs)
+ case fun =>
+ typedDynamicApply(fun, nme.apply, Nil)
+ }
+ }
+
+ /** Translate selection that does not typecheck according to the normal rules into a selectDynamic.
+ * foo.bar ~~> foo.selectDynamic(bar)
+ * foo.bar[T0, ...] ~~> foo.selectDynamic[T0, ...](bar)
+ *
+ * Note: inner part of translation foo.bar(baz) = quux ~~> foo.selectDynamic(bar).update(baz, quux) is achieved
+ *  through an existing transformation in typedAssign [foo.bar(baz) = quux ~~> foo.bar.update(baz, quux)].
+ */
+ def typedDynamicSelect(tree: untpd.Select, targs: List[Tree], pt: Type)(implicit ctx: Context): Tree =
+ typedApply(coreDynamic(tree.qualifier, nme.selectDynamic, tree.name, targs), pt)
+
+ /** Translate selection that does not typecheck according to the normal rules into a updateDynamic.
+ * foo.bar = baz ~~> foo.updateDynamic(bar)(baz)
+ */
+ def typedDynamicAssign(tree: untpd.Assign, pt: Type)(implicit ctx: Context): Tree = {
+ def typedDynamicAssign(qual: untpd.Tree, name: Name, targs: List[untpd.Tree]): Tree =
+ typedApply(untpd.Apply(coreDynamic(qual, nme.updateDynamic, name, targs), tree.rhs), pt)
+ tree.lhs match {
+ case Select(qual, name) if !isDynamicMethod(name) =>
+ typedDynamicAssign(qual, name, Nil)
+ case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
+ typedDynamicAssign(qual, name, targs)
+ case _ =>
+ ctx.error("reassignment to val", tree.pos)
+ tree.withType(ErrorType)
+ }
+ }
+
+ private def coreDynamic(qual: untpd.Tree, dynName: Name, name: Name, targs: List[untpd.Tree])(implicit ctx: Context): untpd.Apply = {
+ val select = untpd.Select(qual, dynName)
+ val selectWithTypes =
+ if (targs.isEmpty) select
+ else untpd.TypeApply(select, targs)
+ untpd.Apply(selectWithTypes, Literal(Constant(name.toString)))
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
new file mode 100644
index 000000000..a18c83ff8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
@@ -0,0 +1,153 @@
+package dotty.tools
+package dotc
+package typer
+
+import ast._
+import core._
+import Trees._
+import Types._, ProtoTypes._, Contexts._, Decorators._, Denotations._, Symbols._
+import Applications._, Implicits._, Flags._
+import util.Positions._
+import printing.{Showable, RefinedPrinter}
+import scala.collection.mutable
+import java.util.regex.Matcher.quoteReplacement
+import reporting.diagnostic.Message
+import reporting.diagnostic.messages._
+
+object ErrorReporting {
+
+ import tpd._
+
+ def errorTree(tree: untpd.Tree, msg: => Message)(implicit ctx: Context): tpd.Tree =
+ tree withType errorType(msg, tree.pos)
+
+ def errorType(msg: => Message, pos: Position)(implicit ctx: Context): ErrorType = {
+ ctx.error(msg, pos)
+ ErrorType
+ }
+
+ def cyclicErrorMsg(ex: CyclicReference)(implicit ctx: Context) = {
+ val cycleSym = ex.denot.symbol
+ def errorMsg(msg: String, cx: Context): String =
+ if (cx.mode is Mode.InferringReturnType) {
+ cx.tree match {
+ case tree: untpd.ValOrDefDef =>
+ // Dotty deviation: Was Trees.ValOrDefDef[_], but this gives ValOrDefDef[Nothing] instead of
+            // ValOrDefDef[Null]. Scala handles it, but it looks accidental because bounds propagation
+            // fails if the parameter is invariant or contravariant.
+ // See test pending/pos/boundspropagation.scala
+ val treeSym = ctx.symOfContextTree(tree)
+ if (treeSym.exists && treeSym.name == cycleSym.name && treeSym.owner == cycleSym.owner) {
+ val result = if (cycleSym is Method) " result" else ""
+ em"overloaded or recursive $cycleSym needs$result type"
+ }
+ else errorMsg(msg, cx.outer)
+ case _ =>
+ errorMsg(msg, cx.outer)
+ }
+ } else msg
+ errorMsg(ex.show, ctx)
+ }
+
+ def wrongNumberOfArgs(fntpe: Type, kind: String, expectedArgs: List[TypeParamInfo], actual: List[untpd.Tree], pos: Position)(implicit ctx: Context) =
+ errorType(WrongNumberOfArgs(fntpe, kind, expectedArgs, actual)(ctx), pos)
+
+ class Errors(implicit ctx: Context) {
+
+ /** An explanatory note to be added to error messages
+ * when there's a problem with abstract var defs */
+ def abstractVarMessage(sym: Symbol): String =
+ if (sym.underlyingSymbol.is(Mutable))
+ "\n(Note that variables need to be initialized to be defined)"
+ else ""
+
+ def expectedTypeStr(tp: Type): String = tp match {
+ case tp: PolyProto =>
+ em"type arguments [${tp.targs}%, %] and ${expectedTypeStr(tp.resultType)}"
+ case tp: FunProto =>
+ val result = tp.resultType match {
+ case _: WildcardType | _: IgnoredProto => ""
+ case tp => em" and expected result type $tp"
+ }
+ em"arguments (${tp.typedArgs.tpes}%, %)$result"
+ case _ =>
+ em"expected type $tp"
+ }
+
+ def anonymousTypeMemberStr(tpe: Type) = {
+ val kind = tpe match {
+ case _: TypeBounds => "type with bounds"
+ case _: PolyType | _: MethodType => "method"
+ case _ => "value of type"
+ }
+ em"$kind $tpe"
+ }
+
+ def overloadedAltsStr(alts: List[SingleDenotation]) =
+ em"overloaded alternatives of ${denotStr(alts.head)} with types\n" +
+ em" ${alts map (_.info)}%\n %"
+
+ def denotStr(denot: Denotation): String =
+ if (denot.isOverloaded) overloadedAltsStr(denot.alternatives)
+ else if (denot.symbol.exists) denot.symbol.showLocated
+ else anonymousTypeMemberStr(denot.info)
+
+ def refStr(tp: Type): String = tp match {
+ case tp: NamedType => denotStr(tp.denot)
+ case _ => anonymousTypeMemberStr(tp)
+ }
+
+ def exprStr(tree: Tree): String = refStr(tree.tpe)
+
+ def patternConstrStr(tree: Tree): String = ???
+
+ def typeMismatch(tree: Tree, pt: Type, implicitFailure: SearchFailure = NoImplicitMatches): Tree =
+ errorTree(tree, typeMismatchMsg(normalize(tree.tpe, pt), pt, implicitFailure.postscript))
+
+ /** A subtype log explaining why `found` does not conform to `expected` */
+ def whyNoMatchStr(found: Type, expected: Type) =
+ if (ctx.settings.explaintypes.value)
+ "\n" + ctx.typerState.show + "\n" + TypeComparer.explained((found <:< expected)(_))
+ else
+ ""
+
+ def typeMismatchMsg(found: Type, expected: Type, postScript: String = "") = {
+ // replace constrained polyparams and their typevars by their bounds where possible
+ object reported extends TypeMap {
+ def setVariance(v: Int) = variance = v
+ val constraint = ctx.typerState.constraint
+ def apply(tp: Type): Type = tp match {
+ case tp: PolyParam =>
+ constraint.entry(tp) match {
+ case bounds: TypeBounds =>
+ if (variance < 0) apply(constraint.fullUpperBound(tp))
+ else if (variance > 0) apply(constraint.fullLowerBound(tp))
+ else tp
+ case NoType => tp
+ case instType => apply(instType)
+ }
+ case tp: TypeVar => apply(tp.stripTypeVar)
+ case _ => mapOver(tp)
+ }
+ }
+ val found1 = reported(found)
+ reported.setVariance(-1)
+ val expected1 = reported(expected)
+ TypeMismatch(found1, expected1, whyNoMatchStr(found, expected), postScript)
+ }
+
+ /** Format `raw` implicitNotFound argument, replacing all
+ * occurrences of `${X}` where `X` is in `paramNames` with the
+ * corresponding shown type in `args`.
+ */
+ def implicitNotFoundString(raw: String, paramNames: List[String], args: List[Type]): String = {
+ def translate(name: String): Option[String] = {
+ val idx = paramNames.indexOf(name)
+ if (idx >= 0) Some(quoteReplacement(ex"${args(idx)}")) else None
+ }
+ """\$\{\w*\}""".r.replaceSomeIn(raw, m => translate(m.matched.drop(2).init))
+ }
+ }
+
+ def err(implicit ctx: Context): Errors = new Errors
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
new file mode 100644
index 000000000..c390ae808
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
@@ -0,0 +1,191 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast.{Trees, untpd, tpd, TreeInfo}
+import Contexts._
+import Types._
+import Flags._
+import NameOps._
+import Symbols._
+import Decorators._
+import Names._
+import StdNames._
+import Trees._
+import Inferencing._
+import util.Positions._
+import collection.mutable
+
+object EtaExpansion {
+
+ import tpd._
+
+ private def lift(defs: mutable.ListBuffer[Tree], expr: Tree, prefix: String = "")(implicit ctx: Context): Tree =
+ if (isPureExpr(expr)) expr
+ else {
+ val name = ctx.freshName(prefix).toTermName
+ val liftedType = fullyDefinedType(expr.tpe.widen, "lifted expression", expr.pos)
+ val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, liftedType, coord = positionCoord(expr.pos))
+ defs += ValDef(sym, expr)
+ ref(sym.valRef)
+ }
+
+ /** Lift out common part of lhs tree taking part in an operator assignment such as
+ *
+ * lhs += expr
+ */
+ def liftAssigned(defs: mutable.ListBuffer[Tree], tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case Apply(MaybePoly(fn @ Select(pre, name), targs), args) =>
+ cpy.Apply(tree)(
+ cpy.Select(fn)(
+ lift(defs, pre), name).appliedToTypeTrees(targs),
+ liftArgs(defs, fn.tpe, args))
+ case Select(pre, name) =>
+ cpy.Select(tree)(lift(defs, pre), name)
+ case _ =>
+ tree
+ }
+
+ /** Lift a function argument, stripping any NamedArg wrapper */
+ def liftArg(defs: mutable.ListBuffer[Tree], arg: Tree, prefix: String = "")(implicit ctx: Context): Tree =
+ arg match {
+ case arg @ NamedArg(name, arg1) => cpy.NamedArg(arg)(name, lift(defs, arg1, prefix))
+ case arg => lift(defs, arg, prefix)
+ }
+
+ /** Lift arguments that are not-idempotent into ValDefs in buffer `defs`
+ * and replace by the idents of so created ValDefs.
+ */
+ def liftArgs(defs: mutable.ListBuffer[Tree], methRef: Type, args: List[Tree])(implicit ctx: Context) =
+ methRef.widen match {
+ case MethodType(paramNames, paramTypes) =>
+ (args, paramNames, paramTypes).zipped map { (arg, name, tp) =>
+ if (tp.isInstanceOf[ExprType]) arg
+ else liftArg(defs, arg, if (name contains '$') "" else name.toString + "$")
+ }
+ case _ =>
+ args map (liftArg(defs, _))
+ }
+
+ /** Lift out function prefix and all arguments from application
+ *
+ * pre.f(arg1, ..., argN) becomes
+ *
+ * val x0 = pre
+ * val x1 = arg1
+ * ...
+ * val xN = argN
+ * x0.f(x1, ..., xN)
+ *
+ * But leave idempotent expressions alone.
+ *
+ */
+ def liftApp(defs: mutable.ListBuffer[Tree], tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case Apply(fn, args) =>
+ cpy.Apply(tree)(liftApp(defs, fn), liftArgs(defs, fn.tpe, args))
+ case TypeApply(fn, targs) =>
+ cpy.TypeApply(tree)(liftApp(defs, fn), targs)
+ case Select(pre, name) if isPureRef(tree) =>
+ cpy.Select(tree)(liftPrefix(defs, pre), name)
+ case Block(stats, expr) =>
+ liftApp(defs ++= stats, expr)
+ case New(tpt) =>
+ tree
+ case _ =>
+ lift(defs, tree)
+ }
+
+ /** Lift prefix `pre` of an application `pre.f(...)` to
+ *
+ * val x0 = pre
+ * x0.f(...)
+ *
+ * unless `pre` is a `New` or `pre` is idempotent.
+ */
+ def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case New(_) => tree
+ case _ => if (isIdempotentExpr(tree)) tree else lift(defs, tree)
+ }
+
+ /** Eta-expanding a tree means converting a method reference to a function value.
+ * @param tree The tree to expand
+ * @param mt The type of the method reference
+ * @param xarity The arity of the expected function type
+ * and assume the lifted application of `tree` (@see liftApp) is
+ *
+ * { val xs = es; expr }
+ *
+ * If xarity matches the number of parameters in `mt`, the eta-expansion is
+ *
+ * { val xs = es; (x1, ..., xn) => expr(x1, ..., xn) }
+ *
+ * Note that the function value's parameters are untyped, hence the type will
+ * be supplied by the environment (or if missing be supplied by the target
+ * method as a fallback). On the other hand, if `xarity` is different from
+ * the number of parameters in `mt`, then we cannot propagate parameter types
+ * from the expected type, and we fallback to using the method's original
+ * parameter types instead.
+ *
+ * In either case, the result is an untyped tree, with `es` and `expr` as typed splices.
+ */
+ def etaExpand(tree: Tree, mt: MethodType, xarity: Int)(implicit ctx: Context): untpd.Tree = {
+ import untpd._
+ assert(!ctx.isAfterTyper)
+ val defs = new mutable.ListBuffer[tpd.Tree]
+ val lifted: Tree = TypedSplice(liftApp(defs, tree))
+ val paramTypes: List[Tree] =
+ if (mt.paramTypes.length == xarity) mt.paramTypes map (_ => TypeTree())
+ else mt.paramTypes map TypeTree
+ val params = (mt.paramNames, paramTypes).zipped.map((name, tpe) =>
+ ValDef(name, tpe, EmptyTree).withFlags(Synthetic | Param).withPos(tree.pos))
+ var ids: List[Tree] = mt.paramNames map (name => Ident(name).withPos(tree.pos))
+ if (mt.paramTypes.nonEmpty && mt.paramTypes.last.isRepeatedParam)
+ ids = ids.init :+ repeated(ids.last)
+ var body: Tree = Apply(lifted, ids)
+ mt.resultType match {
+ case rt: MethodType if !rt.isImplicit => body = PostfixOp(body, nme.WILDCARD)
+ case _ =>
+ }
+ val fn = untpd.Function(params, body)
+ if (defs.nonEmpty) untpd.Block(defs.toList map (untpd.TypedSplice(_)), fn) else fn
+ }
+}
+
+ /** <p> not needed
+ * Expand partial function applications of type `type`.
+ * </p><pre>
+ * p.f(es_1)...(es_n)
+ * ==> {
+ * <b>private synthetic val</b> eta$f = p.f // if p is not stable
+ * ...
+ * <b>private synthetic val</b> eta$e_i = e_i // if e_i is not stable
+ * ...
+ * (ps_1 => ... => ps_m => eta$f([es_1])...([es_m])(ps_1)...(ps_m))
+ * }</pre>
+ * <p>
+ * tree is already attributed
+ * </p>
+ def etaExpandUntyped(tree: Tree)(implicit ctx: Context): untpd.Tree = { // kept as a reserve for now
+ def expand(tree: Tree): untpd.Tree = tree.tpe match {
+ case mt @ MethodType(paramNames, paramTypes) if !mt.isImplicit =>
+ val paramsArgs: List[(untpd.ValDef, untpd.Tree)] =
+ (paramNames, paramTypes).zipped.map { (name, tp) =>
+ val droppedStarTpe = defn.underlyingOfRepeated(tp)
+ val param = ValDef(
+ Modifiers(Param), name,
+ untpd.TypedSplice(TypeTree(droppedStarTpe)), untpd.EmptyTree)
+ var arg: untpd.Tree = Ident(name)
+ if (defn.isRepeatedParam(tp))
+ arg = Typed(arg, Ident(tpnme.WILDCARD_STAR))
+ (param, arg)
+ }
+ val (params, args) = paramsArgs.unzip
+ untpd.Function(params, Apply(untpd.TypedSplice(tree), args))
+ }
+
+ val defs = new mutable.ListBuffer[Tree]
+ val tree1 = liftApp(defs, tree)
+ Block(defs.toList map untpd.TypedSplice, expand(tree1))
+ }
+ */
diff --git a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
new file mode 100644
index 000000000..c444631ae
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
@@ -0,0 +1,83 @@
+package dotty.tools.dotc
+package typer
+
+import core._
+import Phases._
+import Contexts._
+import Symbols._
+import dotty.tools.dotc.parsing.JavaParsers.JavaParser
+import parsing.Parsers.Parser
+import config.Config
+import config.Printers.{typr, default}
+import util.Stats._
+import scala.util.control.NonFatal
+import ast.Trees._
+
+class FrontEnd extends Phase {
+
+ override def phaseName = "frontend"
+ override def isTyper = true
+ import ast.tpd
+
+ def monitor(doing: String)(body: => Unit)(implicit ctx: Context) =
+ try body
+ catch {
+ case NonFatal(ex) =>
+ ctx.echo(s"exception occurred while $doing ${ctx.compilationUnit}")
+ throw ex
+ }
+
+ def parse(implicit ctx: Context) = monitor("parsing") {
+ val unit = ctx.compilationUnit
+ unit.untpdTree =
+ if (unit.isJava) new JavaParser(unit.source).parse()
+ else new Parser(unit.source).parse()
+ val printer = if (ctx.settings.Xprint.value.contains("parser")) default else typr
+ printer.println("parsed:\n" + unit.untpdTree.show)
+ if (Config.checkPositions)
+ unit.untpdTree.checkPos(nonOverlapping = !unit.isJava && !ctx.reporter.hasErrors)
+ }
+
+ def enterSyms(implicit ctx: Context) = monitor("indexing") {
+ val unit = ctx.compilationUnit
+ ctx.typer.index(unit.untpdTree)
+ typr.println("entered: " + unit.source)
+ }
+
+ def typeCheck(implicit ctx: Context) = monitor("typechecking") {
+ val unit = ctx.compilationUnit
+ unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree)
+ typr.println("typed: " + unit.source)
+ record("retained untyped trees", unit.untpdTree.treeSize)
+ record("retained typed trees after typer", unit.tpdTree.treeSize)
+ }
+
+ private def firstTopLevelDef(trees: List[tpd.Tree])(implicit ctx: Context): Symbol = trees match {
+ case PackageDef(_, defs) :: _ => firstTopLevelDef(defs)
+ case Import(_, _) :: defs => firstTopLevelDef(defs)
+ case (tree @ TypeDef(_, _)) :: _ => tree.symbol
+ case _ => NoSymbol
+ }
+
+ protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
+ unit.isJava || firstTopLevelDef(unit.tpdTree :: Nil).isPrimitiveValueClass
+
+ override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
+ val unitContexts = for (unit <- units) yield {
+ ctx.inform(s"compiling ${unit.source}")
+ ctx.fresh.setCompilationUnit(unit)
+ }
+ unitContexts foreach (parse(_))
+ record("parsedTrees", ast.Trees.ntrees)
+ unitContexts foreach (enterSyms(_))
+ unitContexts foreach (typeCheck(_))
+ record("total trees after typer", ast.Trees.ntrees)
+ unitContexts.map(_.compilationUnit).filterNot(discardAfterTyper)
+ }
+
+ override def run(implicit ctx: Context): Unit = {
+ parse
+ enterSyms
+ typeCheck
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
new file mode 100644
index 000000000..f3dceea71
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
@@ -0,0 +1,844 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast.{Trees, untpd, tpd, TreeInfo}
+import util.Positions._
+import util.Stats.{track, record, monitored}
+import printing.Showable
+import Contexts._
+import Types._
+import Flags._
+import TypeErasure.{erasure, hasStableErasure}
+import Mode.ImplicitsEnabled
+import Denotations._
+import NameOps._
+import SymDenotations._
+import Symbols._
+import Types._
+import Decorators._
+import Names._
+import StdNames._
+import Constants._
+import Applications._
+import ProtoTypes._
+import ErrorReporting._
+import Inferencing.fullyDefinedType
+import Trees._
+import Hashable._
+import config.Config
+import config.Printers.{implicits, implicitsDetailed}
+import collection.mutable
+
+/** Implicit resolution */
+object Implicits {
+
+ /** A common base class of contextual implicits and of-type implicits which
+ * represents a set of implicit references.
+ */
+ abstract class ImplicitRefs(initctx: Context) {
+ implicit val ctx: Context =
+ if (initctx == NoContext) initctx else initctx retractMode Mode.ImplicitsEnabled
+
+ /** The implicit references */
+ def refs: List[TermRef]
+
+ /** Return those references in `refs` that are compatible with type `pt`. */
+ protected def filterMatching(pt: Type)(implicit ctx: Context): List[TermRef] = track("filterMatching") {
+
+ def refMatches(ref: TermRef)(implicit ctx: Context) = /*ctx.traceIndented(i"refMatches $ref $pt")*/ {
+
+ def discardForView(tpw: Type, argType: Type): Boolean = tpw match {
+ case mt: MethodType =>
+ mt.isImplicit ||
+ mt.paramTypes.length != 1 ||
+ !(argType relaxed_<:< mt.paramTypes.head)(ctx.fresh.setExploreTyperState)
+ case poly: PolyType =>
+ // We do not need to call ProtoTypes#constrained on `poly` because
+ // `refMatches` is always called with mode TypevarsMissContext enabled.
+ poly.resultType match {
+ case mt: MethodType =>
+ mt.isImplicit ||
+ mt.paramTypes.length != 1 ||
+ !(argType relaxed_<:< wildApprox(mt.paramTypes.head)(ctx.fresh.setExploreTyperState))
+ case rtp =>
+ discardForView(wildApprox(rtp), argType)
+ }
+ case tpw: TermRef =>
+ false // can't discard overloaded refs
+ case tpw =>
+ //if (ctx.typer.isApplicable(tp, argType :: Nil, resultType))
+ // println(i"??? $tp is applicable to $this / typeSymbol = ${tpw.typeSymbol}")
+ !tpw.derivesFrom(defn.FunctionClass(1)) ||
+ ref.symbol == defn.Predef_conforms //
+ // as an implicit conversion, Predef.$conforms is a no-op, so exclude it
+ }
+
+ def discardForValueType(tpw: Type): Boolean = tpw match {
+ case mt: MethodType => !mt.isImplicit
+ case mt: PolyType => discardForValueType(tpw.resultType)
+ case _ => false
+ }
+
+ def discard = pt match {
+ case pt: ViewProto => discardForView(ref.widen, pt.argType)
+ case _: ValueTypeOrProto => !defn.isFunctionType(pt) && discardForValueType(ref.widen)
+ case _ => false
+ }
+
+ (ref.symbol isAccessibleFrom ref.prefix) && {
+ if (discard) {
+ record("discarded eligible")
+ false
+ }
+ else NoViewsAllowed.isCompatible(normalize(ref, pt), pt)
+ }
+ }
+
+ if (refs.isEmpty) refs
+ else refs filter (refMatches(_)(ctx.fresh.addMode(Mode.TypevarsMissContext).setExploreTyperState)) // create a defensive copy of ctx to avoid constraint pollution
+ }
+ }
+
+ /** The implicit references coming from the implicit scope of a type.
+ * @param tp the type determining the implicit scope
+ * @param companionRefs the companion objects in the implicit scope.
+ */
+ class OfTypeImplicits(tp: Type, val companionRefs: TermRefSet)(initctx: Context) extends ImplicitRefs(initctx) {
+ assert(initctx.typer != null)
+ lazy val refs: List[TermRef] = {
+ val buf = new mutable.ListBuffer[TermRef]
+ for (companion <- companionRefs) buf ++= companion.implicitMembers
+ buf.toList
+ }
+
+ /** The implicit references that are eligible for expected type `tp` */
+ lazy val eligible: List[TermRef] =
+ /*>|>*/ track("eligible in tpe") /*<|<*/ {
+ /*>|>*/ ctx.traceIndented(i"eligible($tp), companions = ${companionRefs.toList}%, %", implicitsDetailed, show = true) /*<|<*/ {
+ if (refs.nonEmpty && monitored) record(s"check eligible refs in tpe", refs.length)
+ filterMatching(tp)
+ }
+ }
+
+ override def toString =
+ i"OfTypeImplicits($tp), companions = ${companionRefs.toList}%, %; refs = $refs%, %."
+ }
+
+ /** The implicit references coming from the context.
+ * @param refs the implicit references made visible by the current context.
+ * Note: The name of the reference might be different from the name of its symbol.
+ * In the case of a renaming import a => b, the name of the reference is the renamed
+ * name, b, whereas the name of the symbol is the original name, a.
+ * @param outerCtx the next outer context that makes visible further implicits
+ */
+ class ContextualImplicits(val refs: List[TermRef], val outerImplicits: ContextualImplicits)(initctx: Context) extends ImplicitRefs(initctx) {
+ private val eligibleCache = new mutable.AnyRefMap[Type, List[TermRef]]
+
+ /** The implicit references that are eligible for type `tp`. */
+ def eligible(tp: Type): List[TermRef] = /*>|>*/ track(s"eligible in ctx") /*<|<*/ {
+ if (tp.hash == NotCached) computeEligible(tp)
+ else eligibleCache get tp match {
+ case Some(eligibles) =>
+ def elided(ci: ContextualImplicits): Int = {
+ val n = ci.refs.length
+ if (ci.outerImplicits == NoContext.implicits) n
+ else n + elided(ci.outerImplicits)
+ }
+ if (monitored) record(s"elided eligible refs", elided(this))
+ eligibles
+ case None =>
+ val savedEphemeral = ctx.typerState.ephemeral
+ ctx.typerState.ephemeral = false
+ try {
+ val result = computeEligible(tp)
+ if (ctx.typerState.ephemeral) record("ephemeral cache miss: eligible")
+ else eligibleCache(tp) = result
+ result
+ }
+ finally ctx.typerState.ephemeral |= savedEphemeral
+ }
+ }
+
+ private def computeEligible(tp: Type): List[TermRef] = /*>|>*/ ctx.traceIndented(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ {
+ if (monitored) record(s"check eligible refs in ctx", refs.length)
+ val ownEligible = filterMatching(tp)
+ if (outerImplicits == NoContext.implicits) ownEligible
+ else ownEligible ::: {
+ val shadowed = (ownEligible map (_.name)).toSet
+ outerImplicits.eligible(tp) filterNot (ref => shadowed contains ref.name)
+ }
+ }
+
+ override def toString = {
+ val own = s"(implicits: ${refs mkString ","})"
+ if (outerImplicits == NoContext.implicits) own else own + "\n " + outerImplicits
+ }
+
+ /** This context, or a copy, ensuring root import from symbol `root`
+ * is not present in outer implicits.
+ */
+ def exclude(root: Symbol): ContextualImplicits =
+ if (this == NoContext.implicits) this
+ else {
+ val outerExcluded = outerImplicits exclude root
+ if (ctx.importInfo.site.termSymbol == root) outerExcluded
+ else if (outerExcluded eq outerImplicits) this
+ else new ContextualImplicits(refs, outerExcluded)(ctx)
+ }
+ }
+
+ /** The result of an implicit search */
+ abstract class SearchResult
+
+ /** A successful search
+ * @param ref The implicit reference that succeeded
+ * @param tree The typed tree that needs to be inserted
+ * @param ctx The context after the implicit search
+ */
+ case class SearchSuccess(tree: tpd.Tree, ref: TermRef, tstate: TyperState) extends SearchResult {
+ override def toString = s"SearchSuccess($tree, $ref)"
+ }
+
+ /** A failed search */
+ abstract class SearchFailure extends SearchResult {
+ /** A note describing the failure in more detail - this
+ * is either empty or starts with a '\n'
+ */
+ def postscript(implicit ctx: Context): String = ""
+ }
+
+ /** A "no matching implicit found" failure */
+ case object NoImplicitMatches extends SearchFailure
+
+ /** A search failure that can show information about the cause */
+ abstract class ExplainedSearchFailure extends SearchFailure {
+ protected def pt: Type
+ protected def argument: tpd.Tree
+ protected def qualify(implicit ctx: Context) =
+ if (argument.isEmpty) em"match type $pt"
+ else em"convert from ${argument.tpe} to $pt"
+
+ /** An explanation of the cause of the failure as a string */
+ def explanation(implicit ctx: Context): String
+ }
+
+ /** An ambiguous implicits failure */
+ class AmbiguousImplicits(alt1: TermRef, alt2: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
+ def explanation(implicit ctx: Context): String =
+ em"both ${err.refStr(alt1)} and ${err.refStr(alt2)} $qualify"
+ override def postscript(implicit ctx: Context) =
+ "\nNote that implicit conversions cannot be applied because they are ambiguous;" +
+ "\n " + explanation
+ }
+
+ class NonMatchingImplicit(ref: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
+ def explanation(implicit ctx: Context): String =
+ em"${err.refStr(ref)} does not $qualify"
+ }
+
+ class ShadowedImplicit(ref: TermRef, shadowing: Type, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
+ def explanation(implicit ctx: Context): String =
+ em"${err.refStr(ref)} does $qualify but is shadowed by ${err.refStr(shadowing)}"
+ }
+
+ class DivergingImplicit(ref: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
+ def explanation(implicit ctx: Context): String =
+ em"${err.refStr(ref)} produces a diverging implicit search when trying to $qualify"
+ }
+
+ class FailedImplicit(failures: List[ExplainedSearchFailure], val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
+ def explanation(implicit ctx: Context): String =
+ if (failures.isEmpty) s" No implicit candidates were found that $qualify"
+ else " " + (failures map (_.explanation) mkString "\n ")
+ override def postscript(implicit ctx: Context): String =
+ i"""
+ |Implicit search failure summary:
+ |$explanation"""
+ }
+}
+
+import Implicits._
+
+/** Info relating to implicits that is kept for one run */
+trait ImplicitRunInfo { self: RunInfo =>
+
+ private val implicitScopeCache = mutable.AnyRefMap[Type, OfTypeImplicits]()
+
+ /** The implicit scope of a type `tp`
+ * @param liftingCtx A context to be used when computing the class symbols of
+ * a type. Types may contain type variables with their instances
+ * recorded in the current context. To find out the instance of
+   *                    a type variable, we need the current context; the current
+   *                    runinfo context does not suffice.
+ */
+ def implicitScope(tp: Type, liftingCtx: Context): OfTypeImplicits = {
+
+ val seen: mutable.Set[Type] = mutable.Set()
+
+ /** Replace every typeref that does not refer to a class by a conjunction of class types
+ * that has the same implicit scope as the original typeref. The motivation for applying
+ * this map is that it reduces the total number of types for which we need to
+ * compute and cache the implicit scope; all variations wrt type parameters or
+ * abstract types are eliminated.
+ */
+ object liftToClasses extends TypeMap {
+ override implicit protected val ctx: Context = liftingCtx
+ override def stopAtStatic = true
+ def apply(tp: Type) = tp match {
+ case tp: TypeRef if tp.symbol.isAbstractOrAliasType =>
+ val pre = tp.prefix
+ def joinClass(tp: Type, cls: ClassSymbol) =
+ AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner))
+ val lead = if (tp.prefix eq NoPrefix) defn.AnyType else apply(tp.prefix)
+ (lead /: tp.classSymbols)(joinClass)
+ case tp: TypeVar =>
+ apply(tp.underlying)
+ case tp: HKApply =>
+ def applyArg(arg: Type) = arg match {
+ case TypeBounds(lo, hi) => AndType.make(lo, hi)
+ case _: WildcardType => defn.AnyType
+ case _ => arg
+ }
+ (apply(tp.tycon) /: tp.args)((tc, arg) => AndType.make(tc, applyArg(arg)))
+ case tp: PolyType =>
+ apply(tp.resType)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ def iscopeRefs(tp: Type): TermRefSet =
+ if (seen contains tp) EmptyTermRefSet
+ else {
+ seen += tp
+ iscope(tp).companionRefs
+ }
+
+ // todo: compute implicits directly, without going via companionRefs?
+ def collectCompanions(tp: Type): TermRefSet = track("computeImplicitScope") {
+ ctx.traceIndented(i"collectCompanions($tp)", implicits) {
+ val comps = new TermRefSet
+ tp match {
+ case tp: NamedType =>
+ val pre = tp.prefix
+ comps ++= iscopeRefs(pre)
+ def addClassScope(cls: ClassSymbol): Unit = {
+ def addRef(companion: TermRef): Unit = {
+ val compSym = companion.symbol
+ if (compSym is Package)
+ addRef(TermRef.withSig(companion, nme.PACKAGE, Signature.NotAMethod))
+ else if (compSym.exists)
+ comps += companion.asSeenFrom(pre, compSym.owner).asInstanceOf[TermRef]
+ }
+ def addParentScope(parent: TypeRef): Unit = {
+ iscopeRefs(parent) foreach addRef
+ for (param <- parent.typeParamSymbols)
+ comps ++= iscopeRefs(tp.member(param.name).info)
+ }
+ val companion = cls.companionModule
+ if (companion.exists) addRef(companion.valRef)
+ cls.classParents foreach addParentScope
+ }
+ tp.classSymbols(liftingCtx) foreach addClassScope
+ case _ =>
+ // We exclude lower bounds to conform to SLS 7.2:
+ // "The parts of a type T are: [...] if T is an abstract type, the parts of its upper bound"
+ for (part <- tp.namedPartsWith(_.isType, excludeLowerBounds = true))
+ comps ++= iscopeRefs(part)
+ }
+ comps
+ }
+ }
+
+ /** The implicit scope of type `tp`
+ * @param isLifted Type `tp` is the result of a `liftToClasses` application
+ */
+ def iscope(tp: Type, isLifted: Boolean = false): OfTypeImplicits = {
+ def computeIScope(cacheResult: Boolean) = {
+ val savedEphemeral = ctx.typerState.ephemeral
+ ctx.typerState.ephemeral = false
+ try {
+ val liftedTp = if (isLifted) tp else liftToClasses(tp)
+ val refs =
+ if (liftedTp ne tp)
+ iscope(liftedTp, isLifted = true).companionRefs
+ else
+ collectCompanions(tp)
+ val result = new OfTypeImplicits(tp, refs)(ctx)
+ if (ctx.typerState.ephemeral) record("ephemeral cache miss: implicitScope")
+ else if (cacheResult) implicitScopeCache(tp) = result
+ result
+ }
+ finally ctx.typerState.ephemeral |= savedEphemeral
+ }
+
+ if (tp.hash == NotCached || !Config.cacheImplicitScopes)
+ computeIScope(cacheResult = false)
+ else implicitScopeCache get tp match {
+ case Some(is) => is
+ case None =>
+ // Implicit scopes are tricky to cache because of loops. For example
+ // in `tests/pos/implicit-scope-loop.scala`, the scope of B contains
+ // the scope of A which contains the scope of B. We break the loop
+ // by returning EmptyTermRefSet in `collectCompanions` for types
+ // that we have already seen, but this means that we cannot cache
+ // the computed scope of A, it is incomplete.
+ // Keeping track of exactly where these loops happen would require a
+ // lot of book-keeping, instead we choose to be conservative and only
+ // cache scopes before any type has been seen. This is unfortunate
+ // because loops are very common for types in scala.collection.
+ computeIScope(cacheResult = seen.isEmpty)
+ }
+ }
+
+ iscope(tp)
+ }
+
+ /** A map that counts the number of times an implicit ref was picked */
+ val useCount = new mutable.HashMap[TermRef, Int] {
+ override def default(key: TermRef) = 0
+ }
+
+ def clear() = implicitScopeCache.clear()
+}
+
+/** The implicit resolution part of type checking */
+trait Implicits { self: Typer =>
+
+ import tpd._
+
+ override def viewExists(from: Type, to: Type)(implicit ctx: Context): Boolean = (
+ !from.isError
+ && !to.isError
+ && !ctx.isAfterTyper
+ && (ctx.mode is Mode.ImplicitsEnabled)
+ && from.isValueType
+ && ( from.isValueSubType(to)
+ || inferView(dummyTreeOfType(from), to)
+ (ctx.fresh.addMode(Mode.ImplicitExploration).setExploreTyperState)
+ .isInstanceOf[SearchSuccess]
+ )
+ )
+
+ /** Find an implicit conversion to apply to given tree `from` so that the
+ * result is compatible with type `to`.
+ */
+ def inferView(from: Tree, to: Type)(implicit ctx: Context): SearchResult = track("inferView") {
+ if ( (to isRef defn.AnyClass)
+ || (to isRef defn.ObjectClass)
+ || (to isRef defn.UnitClass)
+ || (from.tpe isRef defn.NothingClass)
+ || (from.tpe isRef defn.NullClass)
+ || (from.tpe eq NoPrefix)) NoImplicitMatches
+ else
+ try inferImplicit(to.stripTypeVar.widenExpr, from, from.pos)
+ catch {
+ case ex: AssertionError =>
+ implicits.println(s"view $from ==> $to")
+ implicits.println(ctx.typerState.constraint.show)
+ implicits.println(TypeComparer.explained(implicit ctx => from.tpe <:< to))
+ throw ex
+ }
+ }
+
+ /** Find an implicit argument for parameter `formal`.
+ * @param error An error handler that gets an error message parameter
+ * which is itself parameterized by another string,
+ * indicating where the implicit parameter is needed
+ */
+ def inferImplicitArg(formal: Type, error: (String => String) => Unit, pos: Position)(implicit ctx: Context): Tree =
+ inferImplicit(formal, EmptyTree, pos) match {
+ case SearchSuccess(arg, _, _) =>
+ arg
+ case ambi: AmbiguousImplicits =>
+ error(where => s"ambiguous implicits: ${ambi.explanation} of $where")
+ EmptyTree
+ case failure: SearchFailure =>
+ val arg = synthesizedClassTag(formal, pos)
+ if (!arg.isEmpty) arg
+ else {
+ var msgFn = (where: String) =>
+ em"no implicit argument of type $formal found for $where" + failure.postscript
+ for {
+ notFound <- formal.typeSymbol.getAnnotation(defn.ImplicitNotFoundAnnot)
+ Trees.Literal(Constant(raw: String)) <- notFound.argument(0)
+ } {
+ msgFn = where =>
+ err.implicitNotFoundString(
+ raw,
+ formal.typeSymbol.typeParams.map(_.name.unexpandedName.toString),
+ formal.argInfos)
+ }
+ error(msgFn)
+ EmptyTree
+ }
+ }
+
+ /** If `formal` is of the form ClassTag[T], where `T` is a class type,
+ * synthesize a class tag for `T`.
+ */
+ def synthesizedClassTag(formal: Type, pos: Position)(implicit ctx: Context): Tree = {
+ if (formal.isRef(defn.ClassTagClass))
+ formal.argTypes match {
+ case arg :: Nil =>
+ val tp = fullyDefinedType(arg, "ClassTag argument", pos)
+ if (hasStableErasure(tp))
+ return ref(defn.ClassTagModule)
+ .select(nme.apply)
+ .appliedToType(tp)
+ .appliedTo(clsOf(erasure(tp)))
+ .withPos(pos)
+ case _ =>
+ }
+ EmptyTree
+ }
+
+ private def assumedCanEqual(ltp: Type, rtp: Type)(implicit ctx: Context) = {
+ val lift = new TypeMap {
+ def apply(t: Type) = t match {
+ case t: TypeRef =>
+ t.info match {
+ case TypeBounds(lo, hi) if lo ne hi => hi
+ case _ => t
+ }
+ case _ =>
+ if (variance > 0) mapOver(t) else t
+ }
+ }
+ ltp.isError || rtp.isError || ltp <:< lift(rtp) || rtp <:< lift(ltp)
+ }
+
+ /** Check that equality tests between types `ltp` and `rtp` make sense */
+ def checkCanEqual(ltp: Type, rtp: Type, pos: Position)(implicit ctx: Context): Unit =
+ if (!ctx.isAfterTyper && !assumedCanEqual(ltp, rtp)) {
+ val res = inferImplicitArg(
+ defn.EqType.appliedTo(ltp, rtp), msgFun => ctx.error(msgFun(""), pos), pos)
+ implicits.println(i"Eq witness found: $res: ${res.tpe}")
+ }
+
+ /** Find an implicit parameter or conversion.
+ * @param pt The expected type of the parameter or conversion.
+ * @param argument If an implicit conversion is searched, the argument to which
+ * it should be applied, EmptyTree otherwise.
+ * @param pos The position where errors should be reported.
+ * !!! todo: catch potential cycles
+ */
+ def inferImplicit(pt: Type, argument: Tree, pos: Position)(implicit ctx: Context): SearchResult = track("inferImplicit") {
+ assert(!ctx.isAfterTyper,
+ if (argument.isEmpty) i"missing implicit parameter of type $pt after typer"
+ else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}")
+ val prevConstr = ctx.typerState.constraint
+ ctx.traceIndented(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) {
+ assert(!pt.isInstanceOf[ExprType])
+ val isearch =
+ if (ctx.settings.explaintypes.value) new ExplainedImplicitSearch(pt, argument, pos)
+ else new ImplicitSearch(pt, argument, pos)
+ val result = isearch.bestImplicit
+ result match {
+ case result: SearchSuccess =>
+ result.tstate.commit()
+ result
+ case result: AmbiguousImplicits =>
+ val deepPt = pt.deepenProto
+ if (deepPt ne pt) inferImplicit(deepPt, argument, pos)
+ else if (ctx.scala2Mode && !ctx.mode.is(Mode.OldOverloadingResolution)) {
+ inferImplicit(pt, argument, pos)(ctx.addMode(Mode.OldOverloadingResolution)) match {
+ case altResult: SearchSuccess =>
+ ctx.migrationWarning(
+ s"According to new implicit resolution rules, this will be ambiguous:\n ${result.explanation}",
+ pos)
+ altResult
+ case _ =>
+ result
+ }
+ }
+ else result
+ case _ =>
+ assert(prevConstr eq ctx.typerState.constraint)
+ result
+ }
+ }
+ }
+
+ /** An implicit search; parameters as in `inferImplicit` */
+ class ImplicitSearch(protected val pt: Type, protected val argument: Tree, pos: Position)(implicit ctx: Context) {
+
+ private def nestedContext = ctx.fresh.setMode(ctx.mode &~ Mode.ImplicitsEnabled)
+
+ private def implicitProto(resultType: Type, f: Type => Type) =
+ if (argument.isEmpty) f(resultType) else ViewProto(f(argument.tpe.widen), f(resultType))
+ // Not clear whether we need to drop the `.widen` here. All tests pass with it in place, though.
+
+ assert(argument.isEmpty || argument.tpe.isValueType || argument.tpe.isInstanceOf[ExprType],
+ em"found: $argument: ${argument.tpe}, expected: $pt")
+
+ /** The expected type for the searched implicit */
+ lazy val fullProto = implicitProto(pt, identity)
+
+ lazy val funProto = fullProto match {
+ case proto: ViewProto =>
+ FunProto(untpd.TypedSplice(dummyTreeOfType(proto.argType)) :: Nil, proto.resultType, self)
+ case proto => proto
+ }
+
+ /** The expected type where parameters and uninstantiated typevars are replaced by wildcard types */
+ val wildProto = implicitProto(pt, wildApprox(_))
+
+ /** Search failures; overridden in ExplainedImplicitSearch */
+ protected def nonMatchingImplicit(ref: TermRef): SearchFailure = NoImplicitMatches
+ protected def divergingImplicit(ref: TermRef): SearchFailure = NoImplicitMatches
+ protected def shadowedImplicit(ref: TermRef, shadowing: Type): SearchFailure = NoImplicitMatches
+ protected def failedSearch: SearchFailure = NoImplicitMatches
+
+ /** Search a list of eligible implicit references */
+ def searchImplicits(eligible: List[TermRef], contextual: Boolean): SearchResult = {
+ val constr = ctx.typerState.constraint
+
+ /** Try to typecheck an implicit reference */
+ def typedImplicit(ref: TermRef)(implicit ctx: Context): SearchResult = track("typedImplicit") { ctx.traceIndented(i"typed implicit $ref, pt = $pt, implicitsEnabled == ${ctx.mode is ImplicitsEnabled}", implicits, show = true) {
+ assert(constr eq ctx.typerState.constraint)
+ var generated: Tree = tpd.ref(ref).withPos(pos)
+ if (!argument.isEmpty)
+ generated = typedUnadapted(
+ untpd.Apply(untpd.TypedSplice(generated), untpd.TypedSplice(argument) :: Nil),
+ pt)
+ val generated1 = adapt(generated, pt)
+ lazy val shadowing =
+ typed(untpd.Ident(ref.name) withPos pos.toSynthetic, funProto)(
+ nestedContext.addMode(Mode.ImplicitShadowing).setExploreTyperState)
+ def refMatches(shadowing: Tree): Boolean =
+ ref.symbol == closureBody(shadowing).symbol || {
+ shadowing match {
+ case Trees.Select(qual, nme.apply) => refMatches(qual)
+ case _ => false
+ }
+ }
+ // Does there exist an implicit value of type `Eq[tp, tp]`?
+ def hasEq(tp: Type): Boolean =
+ new ImplicitSearch(defn.EqType.appliedTo(tp, tp), EmptyTree, pos).bestImplicit match {
+ case result: SearchSuccess => result.ref.symbol != defn.Predef_eqAny
+ case result: AmbiguousImplicits => true
+ case _ => false
+ }
+ def validEqAnyArgs(tp1: Type, tp2: Type) = {
+ List(tp1, tp2).foreach(fullyDefinedType(_, "eqAny argument", pos))
+ assumedCanEqual(tp1, tp2) || !hasEq(tp1) && !hasEq(tp2) ||
+ { implicits.println(i"invalid eqAny[$tp1, $tp2]"); false }
+ }
+ if (ctx.reporter.hasErrors)
+ nonMatchingImplicit(ref)
+ else if (contextual && !ctx.mode.is(Mode.ImplicitShadowing) &&
+ !shadowing.tpe.isError && !refMatches(shadowing)) {
+ implicits.println(i"SHADOWING $ref in ${ref.termSymbol.owner} is shadowed by $shadowing in ${shadowing.symbol.owner}")
+ shadowedImplicit(ref, methPart(shadowing).tpe)
+ }
+ else generated1 match {
+ case TypeApply(fn, targs @ (arg1 :: arg2 :: Nil))
+ if fn.symbol == defn.Predef_eqAny && !validEqAnyArgs(arg1.tpe, arg2.tpe) =>
+ nonMatchingImplicit(ref)
+ case _ =>
+ SearchSuccess(generated1, ref, ctx.typerState)
+ }
+ }}
+
+ /** Given a list of implicit references, produce a list of all implicit search successes,
+ * where the first is supposed to be the best one.
+ * @param pending The list of implicit references that remain to be investigated
+ * @param acc An accumulator of successful matches found so far.
+ */
+ def rankImplicits(pending: List[TermRef], acc: List[SearchSuccess]): List[SearchSuccess] = pending match {
+ case ref :: pending1 =>
+ val history = ctx.searchHistory nest wildProto
+ val result =
+ if (history eq ctx.searchHistory) divergingImplicit(ref)
+ else typedImplicit(ref)(nestedContext.setNewTyperState.setSearchHistory(history))
+ result match {
+ case fail: SearchFailure =>
+ rankImplicits(pending1, acc)
+ case best: SearchSuccess =>
+ if (ctx.mode.is(Mode.ImplicitExploration)) best :: Nil
+ else {
+ val newPending = pending1 filter (isAsGood(_, best.ref)(nestedContext.setExploreTyperState))
+ rankImplicits(newPending, best :: acc)
+ }
+ }
+ case nil => acc
+ }
+
+ /** If the result types of the expected type and both alternatives
+ * are all numeric value types, return the alternative which has
+ * the smaller numeric subtype as result type, if it exists.
+ * (This alternative is then discarded).
+ */
+ def numericValueTieBreak(alt1: SearchSuccess, alt2: SearchSuccess): SearchResult = {
+ def isNumeric(tp: Type) = tp.typeSymbol.isNumericValueClass
+ def isProperSubType(tp1: Type, tp2: Type) =
+ tp1.isValueSubType(tp2) && !tp2.isValueSubType(tp1)
+ val rpt = pt.resultType
+ val rt1 = alt1.ref.widen.resultType
+ val rt2 = alt2.ref.widen.resultType
+ if (isNumeric(rpt) && isNumeric(rt1) && isNumeric(rt2))
+ if (isProperSubType(rt1, rt2)) alt1
+ else if (isProperSubType(rt2, rt1)) alt2
+ else NoImplicitMatches
+ else NoImplicitMatches
+ }
+
+ /** Convert a (possibly empty) list of search successes into a single search result */
+ def condense(hits: List[SearchSuccess]): SearchResult = hits match {
+ case best :: alts =>
+ alts find (alt => isAsGood(alt.ref, best.ref)(ctx.fresh.setExploreTyperState)) match {
+ case Some(alt) =>
+ /* !!! DEBUG
+ println(i"ambiguous refs: ${hits map (_.ref) map (_.show) mkString ", "}")
+ isAsGood(best.ref, alt.ref, explain = true)(ctx.fresh.withExploreTyperState)
+ */
+ numericValueTieBreak(best, alt) match {
+ case eliminated: SearchSuccess => condense(hits.filter(_ ne eliminated))
+ case _ => new AmbiguousImplicits(best.ref, alt.ref, pt, argument)
+ }
+ case None =>
+ ctx.runInfo.useCount(best.ref) += 1
+ best
+ }
+ case Nil =>
+ failedSearch
+ }
+
+ /** Sort list of implicit references according to their popularity
+ * (# of times each was picked in current run).
+ */
+ def sort(eligible: List[TermRef]) = eligible match {
+ case Nil => eligible
+ case e1 :: Nil => eligible
+ case e1 :: e2 :: Nil =>
+ if (ctx.runInfo.useCount(e1) < ctx.runInfo.useCount(e2)) e2 :: e1 :: Nil
+ else eligible
+ case _ => eligible.sortBy(-ctx.runInfo.useCount(_))
+ }
+
+ condense(rankImplicits(sort(eligible), Nil))
+ }
+
+ /** Find a unique best implicit reference */
+ def bestImplicit: SearchResult = {
+ searchImplicits(ctx.implicits.eligible(wildProto), contextual = true) match {
+ case result: SearchSuccess => result
+ case result: AmbiguousImplicits => result
+ case result: SearchFailure =>
+ searchImplicits(implicitScope(wildProto).eligible, contextual = false)
+ }
+ }
+
+ def implicitScope(tp: Type): OfTypeImplicits = ctx.runInfo.implicitScope(tp, ctx)
+ }
+
+ final class ExplainedImplicitSearch(pt: Type, argument: Tree, pos: Position)(implicit ctx: Context)
+ extends ImplicitSearch(pt, argument, pos) {
+ private var myFailures = new mutable.ListBuffer[ExplainedSearchFailure]
+ private def record(fail: ExplainedSearchFailure) = {
+ myFailures += fail
+ fail
+ }
+ def failures = myFailures.toList
+ override def nonMatchingImplicit(ref: TermRef) =
+ record(new NonMatchingImplicit(ref, pt, argument))
+ override def divergingImplicit(ref: TermRef) =
+ record(new DivergingImplicit(ref, pt, argument))
+ override def shadowedImplicit(ref: TermRef, shadowing: Type): SearchFailure =
+ record(new ShadowedImplicit(ref, shadowing, pt, argument))
+ override def failedSearch: SearchFailure = {
+ //println(s"wildProto = $wildProto")
+ //println(s"implicit scope = ${implicitScope(wildProto).companionRefs}")
+ new FailedImplicit(failures, pt, argument)
+ }
+ }
+}
+
+/** Records the history of currently open implicit searches
+ * @param searchDepth The number of open searches.
+ * @param seen A map that records for each class symbol of a type
+ * that's currently searched for the complexity of the
+ * type that is searched for (wrt `typeSize`). The map
+ * is populated only once `searchDepth` is greater than
+ * the threshold given in the `XminImplicitSearchDepth` setting.
+ */
+class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) {
+
+ /** The number of RefinementTypes in this type, after all aliases are expanded */
+ private def typeSize(tp: Type)(implicit ctx: Context): Int = {
+ val accu = new TypeAccumulator[Int] {
+ def apply(n: Int, tp: Type): Int = tp match {
+ case tp: RefinedType =>
+ foldOver(n + 1, tp)
+ case tp: TypeRef if tp.info.isAlias =>
+ apply(n, tp.superType)
+ case _ =>
+ foldOver(n, tp)
+ }
+ }
+ accu.apply(0, tp)
+ }
+
+ /** Check for possible divergence. If one is detected return the current search history
+ * (this will be used as a criterion to abandon the implicit search in rankImplicits).
+ * If no divergence is detected, produce a new search history nested in the current one
+ * which records that we are now also looking for type `proto`.
+ *
+ * As long as `searchDepth` is lower than the `XminImplicitSearchDepth` value
+ * in settings, a new history is always produced, so the implicit search is always
+ * undertaken. If `searchDepth` matches or exceeds the `XminImplicitSearchDepth` value,
+ * we test that the new search is for a class that is either not yet in the set of
+ * `seen` classes, or the complexity of the type `proto` being searched for is strictly
+ * lower than the complexity of the type that was previously encountered and that had
+ * the same class symbol as `proto`. A possible divergence is detected if that test fails.
+ */
+ def nest(proto: Type)(implicit ctx: Context): SearchHistory = {
+ if (searchDepth < ctx.settings.XminImplicitSearchDepth.value)
+ new SearchHistory(searchDepth + 1, seen)
+ else {
+ val size = typeSize(proto)
+ def updateMap(csyms: List[ClassSymbol], seen: Map[ClassSymbol, Int]): SearchHistory = csyms match {
+ case csym :: csyms1 =>
+ seen get csym match {
+ // proto complexity is >= than the last time it was seen → diverge
+ case Some(prevSize) if size >= prevSize => this
+ case _ => updateMap(csyms1, seen.updated(csym, size))
+ }
+ case _ =>
+ new SearchHistory(searchDepth + 1, seen)
+ }
+ if (proto.classSymbols.isEmpty) this
+ else updateMap(proto.classSymbols, seen)
+ }
+ }
+}
+
+/** A set of term references where equality is =:= */
+class TermRefSet(implicit ctx: Context) extends mutable.Traversable[TermRef] {
+ import collection.JavaConverters._
+ private val elems = (new java.util.LinkedHashMap[TermSymbol, List[Type]]).asScala
+
+ def += (ref: TermRef): Unit = {
+ val pre = ref.prefix
+ val sym = ref.symbol.asTerm
+ elems get sym match {
+ case Some(prefixes) =>
+ if (!(prefixes exists (_ =:= pre))) elems(sym) = pre :: prefixes
+ case None =>
+ elems(sym) = pre :: Nil
+ }
+ }
+
+ def ++= (refs: TraversableOnce[TermRef]): Unit =
+ refs foreach +=
+
+ override def foreach[U](f: TermRef => U): Unit =
+ for (sym <- elems.keysIterator)
+ for (pre <- elems(sym))
+ f(TermRef(pre, sym))
+}
+
+@sharable object EmptyTermRefSet extends TermRefSet()(NoContext)
diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
new file mode 100644
index 000000000..3aa289181
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
@@ -0,0 +1,117 @@
+package dotty.tools
+package dotc
+package typer
+
+import ast.{tpd, untpd}
+import ast.Trees._
+import core._
+import util.SimpleMap
+import Symbols._, Names._, Denotations._, Types._, Contexts._, StdNames._, Flags._
+import Decorators.StringInterpolators
+
+object ImportInfo {
+ /** The import info for a root import from given symbol `sym` */
+ def rootImport(refFn: () => TermRef)(implicit ctx: Context) = {
+ val selectors = untpd.Ident(nme.WILDCARD) :: Nil
+ def expr = tpd.Ident(refFn())
+ def imp = tpd.Import(expr, selectors)
+ new ImportInfo(imp.symbol, selectors, isRootImport = true)
+ }
+}
+
+/** Info relating to an import clause
+ * @param sym The import symbol defined by the clause
+ * @param selectors The selector clauses
+ * @param isRootImport true if this is one of the implicit imports of scala, java.lang
+ * or Predef in the start context, false otherwise.
+ */
+class ImportInfo(symf: => Symbol, val selectors: List[untpd.Tree], val isRootImport: Boolean = false)(implicit ctx: Context) {
+
+ lazy val sym = symf
+
+ /** The (TermRef) type of the qualifier of the import clause */
+ def site(implicit ctx: Context): Type = {
+ val ImportType(expr) = sym.info
+ expr.tpe
+ }
+
+ /** The names that are excluded from any wildcard import */
+ def excluded: Set[TermName] = { ensureInitialized(); myExcluded }
+
+ /** A mapping from renamed to original names */
+ def reverseMapping: SimpleMap[TermName, TermName] = { ensureInitialized(); myMapped }
+
+ /** The original names imported by-name before renaming */
+ def originals: Set[TermName] = { ensureInitialized(); myOriginals }
+
+ /** Does the import clause end with wildcard? */
+ def isWildcardImport = { ensureInitialized(); myWildcardImport }
+
+ private var myExcluded: Set[TermName] = null
+ private var myMapped: SimpleMap[TermName, TermName] = null
+ private var myOriginals: Set[TermName] = null
+ private var myWildcardImport: Boolean = false
+
+ /** Compute info relating to the selector list */
+ private def ensureInitialized(): Unit = if (myExcluded == null) {
+ myExcluded = Set()
+ myMapped = SimpleMap.Empty
+ myOriginals = Set()
+ def recur(sels: List[untpd.Tree]): Unit = sels match {
+ case sel :: sels1 =>
+ sel match {
+ case Thicket(Ident(name: TermName) :: Ident(nme.WILDCARD) :: Nil) =>
+ myExcluded += name
+ case Thicket(Ident(from: TermName) :: Ident(to: TermName) :: Nil) =>
+ myMapped = myMapped.updated(to, from)
+ myExcluded += from
+ myOriginals += from
+ case Ident(nme.WILDCARD) =>
+ myWildcardImport = true
+ case Ident(name: TermName) =>
+ myMapped = myMapped.updated(name, name)
+ myOriginals += name
+ }
+ recur(sels1)
+ case nil =>
+ }
+ recur(selectors)
+ }
+
+ /** The implicit references imported by this import clause */
+ def importedImplicits: List[TermRef] = {
+ val pre = site
+ if (isWildcardImport) {
+ val refs = pre.implicitMembers
+ if (excluded.isEmpty) refs
+ else refs filterNot (ref => excluded contains ref.name.toTermName)
+ } else
+ for {
+ renamed <- reverseMapping.keys
+ denot <- pre.member(reverseMapping(renamed)).altsWith(_ is Implicit)
+ } yield TermRef.withSigAndDenot(pre, renamed, denot.signature, denot)
+ }
+
+ /** The root import symbol hidden by this import, or NoSymbol if no such symbol is hidden.
+ * Note: this computation needs to work even for un-initialized import infos, and
+ * is not allowed to force initialization.
+ */
+ lazy val hiddenRoot: Symbol = {
+ val sym = site.termSymbol
+ def hasMaskingSelector = selectors exists {
+ case Thicket(_ :: Ident(nme.WILDCARD) :: Nil) => true
+ case _ => false
+ }
+ if ((defn.RootImportTypes exists (_.symbol == sym)) && hasMaskingSelector) sym else NoSymbol
+ }
+
+ override def toString = {
+ val siteStr = site.show
+ val exprStr = if (siteStr endsWith ".type") siteStr dropRight 5 else siteStr
+ val selectorStr = selectors match {
+ case Ident(name) :: Nil => name.show
+ case _ => "{...}"
+ }
+ i"import $exprStr.$selectorStr"
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
new file mode 100644
index 000000000..aede4974a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -0,0 +1,362 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast._
+import Contexts._, Types._, Flags._, Denotations._, Names._, StdNames._, NameOps._, Symbols._
+import Trees._
+import Constants._
+import Scopes._
+import ProtoTypes._
+import annotation.unchecked
+import util.Positions._
+import util.{Stats, SimpleMap}
+import util.common._
+import Decorators._
+import Uniques._
+import config.Printers.{typr, constr}
+import annotation.tailrec
+import reporting._
+import collection.mutable
+
+object Inferencing {
+
+ import tpd._
+
+ /** Is type fully defined, meaning the type does not contain wildcard types
+ * or uninstantiated type variables. As a side effect, this will minimize
+ * any uninstantiated type variables, according to the given force degree,
+ * but only if the overall result of `isFullyDefined` is `true`.
+ * Variables that are successfully minimized do not count as uninstantiated.
+ */
+ def isFullyDefined(tp: Type, force: ForceDegree.Value)(implicit ctx: Context): Boolean = {
+ val nestedCtx = ctx.fresh.setNewTyperState
+ val result = new IsFullyDefinedAccumulator(force)(nestedCtx).process(tp)
+ if (result) nestedCtx.typerState.commit()
+ result
+ }
+
+ /** The fully defined type, where all type variables are forced.
+ * Throws an error if type contains wildcards.
+ */
+ def fullyDefinedType(tp: Type, what: String, pos: Position)(implicit ctx: Context) =
+ if (isFullyDefined(tp, ForceDegree.all)) tp
+ else throw new Error(i"internal error: type of $what $tp is not fully defined, pos = $pos") // !!! DEBUG
+
+
+ /** Instantiate selected type variables `tvars` in type `tp` */
+ def instantiateSelected(tp: Type, tvars: List[Type])(implicit ctx: Context): Unit =
+ new IsFullyDefinedAccumulator(new ForceDegree.Value(tvars.contains, minimizeAll = true)).process(tp)
+
+ /** The accumulator which forces type variables using the policy encoded in `force`
+ * and returns whether the type is fully defined. The direction in which
+ * a type variable is instantiated is determined as follows:
+ * 1. T is minimized if the constraint over T is only from below (i.e.
+ * constrained lower bound != given lower bound and
+ * constrained upper bound == given upper bound).
+ * 2. T is maximized if the constraint over T is only from above (i.e.
+ * constrained upper bound != given upper bound and
+ * constrained lower bound == given lower bound).
+ * If (1) and (2) do not apply:
+ * 3. T is maximized if it appears only contravariantly in the given type.
+ * 4. T is minimized in all other cases.
+ *
+ * The instantiation is done in two phases:
+ * 1st Phase: Try to instantiate minimizable type variables to
+ * their lower bound. Record whether successful.
+ * 2nd Phase: If first phase was successful, instantiate all remaining type variables
+ * to their upper bound.
+ */
+ private class IsFullyDefinedAccumulator(force: ForceDegree.Value)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
+ private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = {
+ val inst = tvar.instantiate(fromBelow)
+ typr.println(i"forced instantiation of ${tvar.origin} = $inst")
+ inst
+ }
+ private var toMaximize: Boolean = false
+ def apply(x: Boolean, tp: Type): Boolean = tp.dealias match {
+ case _: WildcardType | _: ProtoType =>
+ false
+ case tvar: TypeVar
+ if !tvar.isInstantiated && ctx.typerState.constraint.contains(tvar) =>
+ force.appliesTo(tvar) && {
+ val direction = instDirection(tvar.origin)
+ if (direction != 0) {
+ //if (direction > 0) println(s"inst $tvar dir = up")
+ instantiate(tvar, direction < 0)
+ }
+ else {
+ val minimize =
+ force.minimizeAll ||
+ variance >= 0 && !(
+ force == ForceDegree.noBottom &&
+ defn.isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true)))
+ if (minimize) instantiate(tvar, fromBelow = true)
+ else toMaximize = true
+ }
+ foldOver(x, tvar)
+ }
+ case tp =>
+ foldOver(x, tp)
+ }
+
+ private class UpperInstantiator(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ def apply(x: Unit, tp: Type): Unit = {
+ tp match {
+ case tvar: TypeVar if !tvar.isInstantiated =>
+ instantiate(tvar, fromBelow = false)
+ case _ =>
+ }
+ foldOver(x, tp)
+ }
+ }
+
+ def process(tp: Type): Boolean = {
+ val res = apply(true, tp)
+ if (res && toMaximize) new UpperInstantiator().apply((), tp)
+ res
+ }
+ }
+
+ /** The list of uninstantiated type variables bound by some prefix of type `T` which
+ * occur in at least one formal parameter type of a prefix application.
+ * Considered prefixes are:
+ * - The function `f` of an application node `f(e1, .., en)`
+ * - The function `f` of a type application node `f[T1, ..., Tn]`
+ * - The prefix `p` of a selection `p.f`.
+ * - The result expression `e` of a block `{s1; .. sn; e}`.
+ */
+ def tvarsInParams(tree: Tree)(implicit ctx: Context): List[TypeVar] = {
+ @tailrec def boundVars(tree: Tree, acc: List[TypeVar]): List[TypeVar] = tree match {
+ case Apply(fn, _) => boundVars(fn, acc)
+ case TypeApply(fn, targs) =>
+ val tvars = targs.tpes.collect {
+ case tvar: TypeVar if !tvar.isInstantiated => tvar
+ }
+ boundVars(fn, acc ::: tvars)
+ case Select(pre, _) => boundVars(pre, acc)
+ case Block(_, expr) => boundVars(expr, acc)
+ case _ => acc
+ }
+ @tailrec def occurring(tree: Tree, toTest: List[TypeVar], acc: List[TypeVar]): List[TypeVar] =
+ if (toTest.isEmpty) acc
+ else tree match {
+ case Apply(fn, _) =>
+ fn.tpe.widen match {
+ case mtp: MethodType =>
+ val (occ, nocc) = toTest.partition(tvar => mtp.paramTypes.exists(tvar.occursIn))
+ occurring(fn, nocc, occ ::: acc)
+ case _ =>
+ occurring(fn, toTest, acc)
+ }
+ case TypeApply(fn, targs) => occurring(fn, toTest, acc)
+ case Select(pre, _) => occurring(pre, toTest, acc)
+ case Block(_, expr) => occurring(expr, toTest, acc)
+ case _ => acc
+ }
+ occurring(tree, boundVars(tree, Nil), Nil)
+ }
+
+ /** The instantiation direction for given poly param computed
+ * from the constraint:
+ * @return 1 (maximize) if constraint is uniformly from above,
+ * -1 (minimize) if constraint is uniformly from below,
+ * 0 if unconstrained, or constraint is from below and above.
+ */
+ private def instDirection(param: PolyParam)(implicit ctx: Context): Int = {
+ val constrained = ctx.typerState.constraint.fullBounds(param)
+ val original = param.binder.paramBounds(param.paramNum)
+ val cmp = ctx.typeComparer
+ val approxBelow =
+ if (!cmp.isSubTypeWhenFrozen(constrained.lo, original.lo)) 1 else 0
+ val approxAbove =
+ if (!cmp.isSubTypeWhenFrozen(original.hi, constrained.hi)) 1 else 0
+ approxAbove - approxBelow
+ }
+
+ /** Recursively widen and also follow type declarations and type aliases. */
+ def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match {
+ case tp: TypeRef if !tp.symbol.isClass =>
+ widenForMatchSelector(tp.superType)
+ case tp: HKApply =>
+ widenForMatchSelector(tp.superType)
+ case tp: AnnotatedType =>
+ tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot)
+ case tp => tp
+ }
+
+ /** Following type aliases and stripping refinements and annotations, if one arrives at a
+ * class type reference where the class has a companion module, a reference to
+ * that companion module. Otherwise NoType
+ */
+ def companionRef(tp: Type)(implicit ctx: Context): Type =
+ tp.underlyingClassRef(refinementOK = true) match {
+ case tp: TypeRef =>
+ val companion = tp.classSymbol.companionModule
+ if (companion.exists)
+ companion.valRef.asSeenFrom(tp.prefix, companion.symbol.owner)
+ else NoType
+ case _ => NoType
+ }
+
+ /** Interpolate those undetermined type variables in the widened type of this tree
+ * which are introduced by type application contained in the tree.
+ * If such a variable appears covariantly in type `tp` or does not appear at all,
+ * approximate it by its lower bound. Otherwise, if it appears contravariantly
+ * in type `tp` approximate it by its upper bound.
+ * @param ownedBy if it is different from NoSymbol, all type variables owned by
+ * `ownedBy` qualify, independent of position.
+ * Without that second condition, it can be that certain variables escape
+ * interpolation, for instance when their tree was eta-lifted, so
+ * the typechecked tree is no longer the tree in which the variable
+ * was declared. A concrete example of this phenomenon can be
+ * observed when compiling core.TypeOps#asSeenFrom.
+ */
+ def interpolateUndetVars(tree: Tree, ownedBy: Symbol)(implicit ctx: Context): Unit = {
+ val constraint = ctx.typerState.constraint
+ val qualifies = (tvar: TypeVar) =>
+ (tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy
+ def interpolate() = Stats.track("interpolateUndetVars") {
+ val tp = tree.tpe.widen
+ constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}")
+ constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}")
+
+ val vs = variances(tp, qualifies)
+ val hasUnreportedErrors = ctx.typerState.reporter match {
+ case r: StoreReporter if r.hasErrors => true
+ case _ => false
+ }
+ // Avoid interpolating variables if typerstate has unreported errors.
+ // Reason: The errors might reflect unsatisfiable constraints. In that
+    // case interpolating without taking the constraints into account risks producing
+ // nonsensical types that then in turn produce incomprehensible errors.
+ // An example is in neg/i1240.scala. Without the condition in the next code line
+ // we get for
+ //
+ // val y: List[List[String]] = List(List(1))
+ //
+      //  i1240.scala:5: error: type mismatch:
+ // found : Int(1)
+ // required: Nothing
+ // val y: List[List[String]] = List(List(1))
+ // ^
+ // With the condition, we get the much more sensical:
+ //
+      //  i1240.scala:5: error: type mismatch:
+ // found : Int(1)
+ // required: String
+ // val y: List[List[String]] = List(List(1))
+ if (!hasUnreportedErrors)
+ vs foreachBinding { (tvar, v) =>
+ if (v != 0) {
+ typr.println(s"interpolate ${if (v == 1) "co" else "contra"}variant ${tvar.show} in ${tp.show}")
+ tvar.instantiate(fromBelow = v == 1)
+ }
+ }
+ for (tvar <- constraint.uninstVars)
+ if (!(vs contains tvar) && qualifies(tvar)) {
+ typr.println(s"instantiating non-occurring ${tvar.show} in ${tp.show} / $tp")
+ tvar.instantiate(fromBelow = true)
+ }
+ }
+ if (constraint.uninstVars exists qualifies) interpolate()
+ }
+
+  /** Instantiate undetermined type variables so that type `tp` is
+ * maximized and return None. If this is not possible, because a non-variant
+ * typevar is not uniquely determined, return that typevar in a Some.
+ */
+ def maximizeType(tp: Type)(implicit ctx: Context): Option[TypeVar] = Stats.track("maximizeType") {
+ val vs = variances(tp, alwaysTrue)
+ var result: Option[TypeVar] = None
+ vs foreachBinding { (tvar, v) =>
+ if (v == 1) tvar.instantiate(fromBelow = false)
+ else if (v == -1) tvar.instantiate(fromBelow = true)
+ else {
+ val bounds = ctx.typerState.constraint.fullBounds(tvar.origin)
+ if (!(bounds.hi <:< bounds.lo)) result = Some(tvar)
+ tvar.instantiate(fromBelow = false)
+ }
+ }
+ result
+ }
+
+ type VarianceMap = SimpleMap[TypeVar, Integer]
+
+ /** All occurrences of type vars in this type that satisfy predicate
+ * `include` mapped to their variances (-1/0/1) in this type, where
+   *   -1 means: only contravariant occurrences
+ * +1 means: only covariant occurrences
+ * 0 means: mixed or non-variant occurrences
+ *
+ * Note: We intentionally use a relaxed version of variance here,
+ * where the variance does not change under a prefix of a named type
+ * (the strict version makes prefixes invariant). This turns out to be
+ * better for type inference. In a nutshell, if a type variable occurs
+ * like this:
+ *
+ * (U? >: x.type) # T
+ *
+ * we want to instantiate U to x.type right away. No need to wait further.
+ */
+ private def variances(tp: Type, include: TypeVar => Boolean)(implicit ctx: Context): VarianceMap = Stats.track("variances") {
+ val constraint = ctx.typerState.constraint
+
+ object accu extends TypeAccumulator[VarianceMap] {
+ def setVariance(v: Int) = variance = v
+ def apply(vmap: VarianceMap, t: Type): VarianceMap = t match {
+ case t: TypeVar
+ if !t.isInstantiated && (ctx.typerState.constraint contains t) && include(t) =>
+ val v = vmap(t)
+ if (v == null) vmap.updated(t, variance)
+ else if (v == variance || v == 0) vmap
+ else vmap.updated(t, 0)
+ case _ =>
+ foldOver(vmap, t)
+ }
+ override def applyToPrefix(vmap: VarianceMap, t: NamedType) =
+ apply(vmap, t.prefix)
+ }
+
+ /** Include in `vmap` type variables occurring in the constraints of type variables
+ * already in `vmap`. Specifically:
+ * - if `tvar` is covariant in `vmap`, include all variables in its lower bound
+ * (because they influence the minimal solution of `tvar`),
+ * - if `tvar` is contravariant in `vmap`, include all variables in its upper bound
+ * at flipped variances (because they influence the maximal solution of `tvar`),
+ * - if `tvar` is nonvariant in `vmap`, include all variables in its upper and lower
+ * bounds as non-variant.
+ * Do this in a fixpoint iteration until `vmap` stabilizes.
+ */
+ def propagate(vmap: VarianceMap): VarianceMap = {
+ var vmap1 = vmap
+ def traverse(tp: Type) = { vmap1 = accu(vmap1, tp) }
+ vmap.foreachBinding { (tvar, v) =>
+ val param = tvar.origin
+ val e = constraint.entry(param)
+ accu.setVariance(v)
+ if (v >= 0) {
+ traverse(e.bounds.lo)
+ constraint.lower(param).foreach(p => traverse(constraint.typeVarOfParam(p)))
+ }
+ if (v <= 0) {
+ traverse(e.bounds.hi)
+ constraint.upper(param).foreach(p => traverse(constraint.typeVarOfParam(p)))
+ }
+ }
+ if (vmap1 eq vmap) vmap else propagate(vmap1)
+ }
+
+ propagate(accu(SimpleMap.Empty, tp))
+ }
+}
+
+/** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */
+@sharable object ForceDegree {
+ class Value(val appliesTo: TypeVar => Boolean, val minimizeAll: Boolean)
+ val none = new Value(_ => false, minimizeAll = false)
+ val all = new Value(_ => true, minimizeAll = false)
+ val noBottom = new Value(_ => true, minimizeAll = false)
+}
+
diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala
new file mode 100644
index 000000000..3931fcaf4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala
@@ -0,0 +1,539 @@
+package dotty.tools
+package dotc
+package typer
+
+import dotty.tools.dotc.ast.Trees.NamedArg
+import dotty.tools.dotc.ast.{Trees, untpd, tpd, TreeTypeMap}
+import Trees._
+import core._
+import Flags._
+import Symbols._
+import Types._
+import Decorators._
+import Constants._
+import StdNames.nme
+import Contexts.Context
+import Names.{Name, TermName}
+import NameOps._
+import SymDenotations.SymDenotation
+import Annotations._
+import transform.ExplicitOuter
+import Inferencing.fullyDefinedType
+import config.Printers.inlining
+import ErrorReporting.errorTree
+import collection.mutable
+import transform.TypeUtils._
+
+object Inliner {
+ import tpd._
+
+  /** Adds accessors for all non-public term members accessed
+ * from `tree`. Non-public type members are currently left as they are.
+ * This means that references to a private type will lead to typing failures
+ * on the code when it is inlined. Less than ideal, but hard to do better (see below).
+ *
+ * @return If there are accessors generated, a thicket consisting of the rewritten `tree`
+ * and all accessors, otherwise the original tree.
+ */
+ private def makeInlineable(tree: Tree)(implicit ctx: Context) = {
+
+ /** A tree map which inserts accessors for all non-public term members accessed
+     *  from inlined code. Accessors are collected in the `accessors` buffer.
+ */
+ object addAccessors extends TreeMap {
+ val inlineMethod = ctx.owner
+ val accessors = new mutable.ListBuffer[MemberDef]
+
+ /** A definition needs an accessor if it is private, protected, or qualified private */
+ def needsAccessor(sym: Symbol)(implicit ctx: Context) =
+ sym.is(AccessFlags) || sym.privateWithin.exists
+
+ /** The name of the next accessor to be generated */
+ def accessorName(implicit ctx: Context) =
+ ctx.freshNames.newName(inlineMethod.name.asTermName.inlineAccessorName.toString)
+
+ /** A fresh accessor symbol.
+ *
+ * @param tree The tree representing the original access to the non-public member
+ * @param accessorInfo The type of the accessor
+ */
+ def accessorSymbol(tree: Tree, accessorInfo: Type)(implicit ctx: Context): Symbol =
+ ctx.newSymbol(
+ owner = inlineMethod.owner,
+ name = if (tree.isTerm) accessorName.toTermName else accessorName.toTypeName,
+ flags = if (tree.isTerm) Synthetic | Method else Synthetic,
+ info = accessorInfo,
+ coord = tree.pos).entered
+
+ /** Add an accessor to a non-public method and replace the original access with a
+ * call to the accessor.
+ *
+ * @param tree The original access to the non-public symbol
+ * @param refPart The part that refers to the method or field of the original access
+ * @param targs All type arguments passed in the access, if any
+ * @param argss All value arguments passed in the access, if any
+ * @param accessedType The type of the accessed method or field, as seen from the access site.
+ * @param rhs A function that builds the right-hand side of the accessor,
+ * given a reference to the accessed symbol and any type and
+       *                      value arguments that need to be integrated.
+ * @return The call to the accessor method that replaces the original access.
+ */
+ def addAccessor(tree: Tree, refPart: Tree, targs: List[Tree], argss: List[List[Tree]],
+ accessedType: Type, rhs: (Tree, List[Type], List[List[Tree]]) => Tree)(implicit ctx: Context): Tree = {
+ val qual = qualifier(refPart)
+ def refIsLocal = qual match {
+ case qual: This => qual.symbol == refPart.symbol.owner
+ case _ => false
+ }
+ val (accessorDef, accessorRef) =
+ if (refPart.symbol.isStatic || refIsLocal) {
+ // Easy case: Reference to a static symbol or a symbol referenced via `this.`
+ val accessorType = accessedType.ensureMethodic
+ val accessor = accessorSymbol(tree, accessorType).asTerm
+ val accessorDef = polyDefDef(accessor, tps => argss =>
+ rhs(refPart, tps, argss))
+ val accessorRef = ref(accessor).appliedToTypeTrees(targs).appliedToArgss(argss)
+ (accessorDef, accessorRef)
+ } else {
+ // Hard case: Reference needs to go via a dynamic prefix
+ inlining.println(i"adding inline accessor for $tree -> (${qual.tpe}, $refPart: ${refPart.getClass}, [$targs%, %], ($argss%, %))")
+
+ // Need to dealias in order to catch all possible references to abstracted over types in
+ // substitutions
+ val dealiasMap = new TypeMap {
+ def apply(t: Type) = mapOver(t.dealias)
+ }
+
+ val qualType = dealiasMap(qual.tpe.widen)
+
+ // Add qualifier type as leading method argument to argument `tp`
+ def addQualType(tp: Type): Type = tp match {
+ case tp: PolyType => tp.derivedPolyType(tp.paramNames, tp.paramBounds, addQualType(tp.resultType))
+ case tp: ExprType => addQualType(tp.resultType)
+ case tp => MethodType(qualType :: Nil, tp)
+ }
+
+ // The types that are local to the inlined method, and that therefore have
+ // to be abstracted out in the accessor, which is external to the inlined method
+ val localRefs = qualType.namedPartsWith(_.symbol.isContainedIn(inlineMethod)).toList
+
+ // Abstract accessed type over local refs
+ def abstractQualType(mtpe: Type): Type =
+ if (localRefs.isEmpty) mtpe
+ else mtpe.LambdaAbstract(localRefs.map(_.symbol)).asInstanceOf[PolyType].flatten
+
+ val accessorType = abstractQualType(addQualType(dealiasMap(accessedType)))
+ val accessor = accessorSymbol(tree, accessorType).asTerm
+
+ val accessorDef = polyDefDef(accessor, tps => argss =>
+ rhs(argss.head.head.select(refPart.symbol), tps.drop(localRefs.length), argss.tail))
+
+ val accessorRef = ref(accessor)
+ .appliedToTypeTrees(localRefs.map(TypeTree(_)) ++ targs)
+ .appliedToArgss((qual :: Nil) :: argss)
+ (accessorDef, accessorRef)
+ }
+ accessors += accessorDef
+ inlining.println(i"added inline accessor: $accessorDef")
+ accessorRef
+ }
+
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = super.transform {
+ tree match {
+ case _: Apply | _: TypeApply | _: RefTree if needsAccessor(tree.symbol) =>
+ if (tree.isTerm) {
+ val (methPart, targs, argss) = decomposeCall(tree)
+ addAccessor(tree, methPart, targs, argss,
+ accessedType = methPart.tpe.widen,
+ rhs = (qual, tps, argss) => qual.appliedToTypes(tps).appliedToArgss(argss))
+ } else {
+ // TODO: Handle references to non-public types.
+ // This is quite tricky, as such types can appear anywhere, including as parts
+ // of types of other things. For the moment we do nothing and complain
+ // at the implicit expansion site if there's a reference to an inaccessible type.
+ // Draft code (incomplete):
+ //
+ // val accessor = accessorSymbol(tree, TypeAlias(tree.tpe)).asType
+ // myAccessors += TypeDef(accessor)
+ // ref(accessor)
+ //
+ tree
+ }
+ case Assign(lhs: RefTree, rhs) if needsAccessor(lhs.symbol) =>
+ addAccessor(tree, lhs, Nil, (rhs :: Nil) :: Nil,
+ accessedType = MethodType(rhs.tpe.widen :: Nil, defn.UnitType),
+ rhs = (lhs, tps, argss) => lhs.becomes(argss.head.head))
+ case _ => tree
+ }
+ }
+ }
+
+ val tree1 = addAccessors.transform(tree)
+ flatTree(tree1 :: addAccessors.accessors.toList)
+ }
+
+ /** Register inline info for given inline method `sym`.
+ *
+   *  @param  sym   The symbol denotation of the inline method for which info is registered
+ * @param treeExpr A function that computes the tree to be inlined, given a context
+ * This tree may still refer to non-public members.
+ * @param ctx The context to use for evaluating `treeExpr`. It needs
+ * to have the inlined method as owner.
+ */
+ def registerInlineInfo(
+ sym: SymDenotation, treeExpr: Context => Tree)(implicit ctx: Context): Unit = {
+ sym.unforcedAnnotation(defn.BodyAnnot) match {
+ case Some(ann: ConcreteBodyAnnotation) =>
+ case Some(ann: LazyBodyAnnotation) if ann.isEvaluated =>
+ case _ =>
+ if (!ctx.isAfterTyper) {
+ val inlineCtx = ctx
+ sym.updateAnnotation(LazyBodyAnnotation { _ =>
+ implicit val ctx: Context = inlineCtx
+ ctx.withNoError(treeExpr(ctx))(makeInlineable)
+ })
+ }
+ }
+ }
+
+ /** `sym` has an inline method with a known body to inline (note: definitions coming
+   *  from Scala2x class files might be `@inline`, but still lack that body).
+ */
+ def hasBodyToInline(sym: SymDenotation)(implicit ctx: Context): Boolean =
+ sym.isInlineMethod && sym.hasAnnotation(defn.BodyAnnot)
+
+ private def bodyAndAccessors(sym: SymDenotation)(implicit ctx: Context): (Tree, List[MemberDef]) =
+ sym.unforcedAnnotation(defn.BodyAnnot).get.tree match {
+ case Thicket(body :: accessors) => (body, accessors.asInstanceOf[List[MemberDef]])
+ case body => (body, Nil)
+ }
+
+ /** The body to inline for method `sym`.
+ * @pre hasBodyToInline(sym)
+ */
+ def bodyToInline(sym: SymDenotation)(implicit ctx: Context): Tree =
+ bodyAndAccessors(sym)._1
+
+ /** The accessors to non-public members needed by the inlinable body of `sym`.
+ * These accessors are dropped as a side effect of calling this method.
+ * @pre hasBodyToInline(sym)
+ */
+ def removeInlineAccessors(sym: SymDenotation)(implicit ctx: Context): List[MemberDef] = {
+ val (body, accessors) = bodyAndAccessors(sym)
+ if (accessors.nonEmpty) sym.updateAnnotation(ConcreteBodyAnnotation(body))
+ accessors
+ }
+
+ /** Try to inline a call to a `@inline` method. Fail with error if the maximal
+ * inline depth is exceeded.
+ *
+ * @param tree The call to inline
+ * @param pt The expected type of the call.
+ * @return An `Inlined` node that refers to the original call and the inlined bindings
+ * and body that replace it.
+ */
+ def inlineCall(tree: Tree, pt: Type)(implicit ctx: Context): Tree =
+ if (enclosingInlineds.length < ctx.settings.xmaxInlines.value)
+ new Inliner(tree, bodyToInline(tree.symbol)).inlined(pt)
+ else errorTree(
+ tree,
+ i"""|Maximal number of successive inlines (${ctx.settings.xmaxInlines.value}) exceeded,
+ |Maybe this is caused by a recursive inline method?
+          |You can use -Xmax-inlines to change the limit."""
+ )
+
+ /** Replace `Inlined` node by a block that contains its bindings and expansion */
+ def dropInlined(inlined: tpd.Inlined)(implicit ctx: Context): Tree = {
+ val reposition = new TreeMap {
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = {
+ super.transform(tree).withPos(inlined.call.pos)
+ }
+ }
+ tpd.seq(inlined.bindings, reposition.transform(inlined.expansion))
+ }
+
+ /** The qualifier part of a Select or Ident.
+ * For an Ident, this is the `This` of the current class. (TODO: use elsewhere as well?)
+ */
+ private def qualifier(tree: Tree)(implicit ctx: Context) = tree match {
+ case Select(qual, _) => qual
+ case _ => This(ctx.owner.enclosingClass.asClass)
+ }
+}
+
+/** Produces an inlined version of `call` via its `inlined` method.
+ *
+ * @param call The original call to a `@inline` method
+ * @param rhs The body of the inline method that replaces the call.
+ */
+class Inliner(call: tpd.Tree, rhs: tpd.Tree)(implicit ctx: Context) {
+ import tpd._
+ import Inliner._
+
+ private val (methPart, targs, argss) = decomposeCall(call)
+ private val meth = methPart.symbol
+ private val prefix = qualifier(methPart)
+
+ // Make sure all type arguments to the call are fully determined
+ for (targ <- targs) fullyDefinedType(targ.tpe, "inlined type argument", targ.pos)
+
+ /** A map from parameter names of the inline method to references of the actual arguments.
+ * For a type argument this is the full argument type.
+ * For a value argument, it is a reference to either the argument value
+ * (if the argument is a pure expression of singleton type), or to `val` or `def` acting
+ * as a proxy (if the argument is something else).
+ */
+ private val paramBinding = new mutable.HashMap[Name, Type]
+
+ /** A map from references to (type and value) parameters of the inline method
+ * to their corresponding argument or proxy references, as given by `paramBinding`.
+ */
+ private val paramProxy = new mutable.HashMap[Type, Type]
+
+ /** A map from the classes of (direct and outer) this references in `rhs`
+ * to references of their proxies.
+ * Note that we can't index by the ThisType itself since there are several
+   *  possible forms to express what is logically the same ThisType. E.g.
+ *
+ * ThisType(TypeRef(ThisType(p), cls))
+ *
+ * vs
+ *
+ * ThisType(TypeRef(TermRef(ThisType(<root>), p), cls))
+ *
+ * These are different (wrt ==) types but represent logically the same key
+ */
+ private val thisProxy = new mutable.HashMap[ClassSymbol, TermRef]
+
+ /** A buffer for bindings that define proxies for actual arguments */
+ val bindingsBuf = new mutable.ListBuffer[ValOrDefDef]
+
+ computeParamBindings(meth.info, targs, argss)
+
+ private def newSym(name: Name, flags: FlagSet, info: Type): Symbol =
+ ctx.newSymbol(ctx.owner, name, flags, info, coord = call.pos)
+
+ /** Populate `paramBinding` and `bindingsBuf` by matching parameters with
+   *  corresponding arguments. `bindingsBuf` will be further extended later by
+ * proxies to this-references.
+ */
+ private def computeParamBindings(tp: Type, targs: List[Tree], argss: List[List[Tree]]): Unit = tp match {
+ case tp: PolyType =>
+ (tp.paramNames, targs).zipped.foreach { (name, arg) =>
+ paramBinding(name) = arg.tpe.stripTypeVar
+ }
+ computeParamBindings(tp.resultType, Nil, argss)
+ case tp: MethodType =>
+ (tp.paramNames, tp.paramTypes, argss.head).zipped.foreach { (name, paramtp, arg) =>
+ def isByName = paramtp.dealias.isInstanceOf[ExprType]
+ paramBinding(name) = arg.tpe.stripAnnots.stripTypeVar match {
+ case argtpe: SingletonType if isByName || isIdempotentExpr(arg) => argtpe
+ case argtpe =>
+ val inlineFlag = if (paramtp.hasAnnotation(defn.InlineParamAnnot)) Inline else EmptyFlags
+ val (bindingFlags, bindingType) =
+ if (isByName) (inlineFlag | Method, ExprType(argtpe.widen))
+ else (inlineFlag, argtpe.widen)
+ val boundSym = newSym(name, bindingFlags, bindingType).asTerm
+ val binding =
+ if (isByName) DefDef(boundSym, arg.changeOwner(ctx.owner, boundSym))
+ else ValDef(boundSym, arg)
+ bindingsBuf += binding
+ boundSym.termRef
+ }
+ }
+ computeParamBindings(tp.resultType, targs, argss.tail)
+ case _ =>
+ assert(targs.isEmpty)
+ assert(argss.isEmpty)
+ }
+
+ /** Populate `thisProxy` and `paramProxy` as follows:
+ *
+ * 1a. If given type refers to a static this, thisProxy binds it to corresponding global reference,
+ * 1b. If given type refers to an instance this, create a proxy symbol and bind the thistype to
+ * refer to the proxy. The proxy is not yet entered in `bindingsBuf` that will come later.
+ * 2. If given type refers to a parameter, make `paramProxy` refer to the entry stored
+ * in `paramNames` under the parameter's name. This roundabout way to bind parameter
+   *      references to proxies is done because we do not know a priori what the parameter
+ * references of a method are (we only know the method's type, but that contains PolyParams
+      and MethodParams, not TypeRefs or TermRefs).
+ */
+ private def registerType(tpe: Type): Unit = tpe match {
+ case tpe: ThisType
+ if !ctx.owner.isContainedIn(tpe.cls) && !tpe.cls.is(Package) &&
+ !thisProxy.contains(tpe.cls) =>
+ if (tpe.cls.isStaticOwner)
+ thisProxy(tpe.cls) = tpe.cls.sourceModule.termRef
+ else {
+ val proxyName = s"${tpe.cls.name}_this".toTermName
+ val proxyType = tpe.asSeenFrom(prefix.tpe, meth.owner)
+ thisProxy(tpe.cls) = newSym(proxyName, EmptyFlags, proxyType).termRef
+ registerType(meth.owner.thisType) // make sure we have a base from which to outer-select
+ }
+ case tpe: NamedType
+ if tpe.symbol.is(Param) && tpe.symbol.owner == meth &&
+ !paramProxy.contains(tpe) =>
+ paramProxy(tpe) = paramBinding(tpe.name)
+ case _ =>
+ }
+
+ /** Register type of leaf node */
+ private def registerLeaf(tree: Tree): Unit = tree match {
+ case _: This | _: Ident | _: TypeTree =>
+ tree.tpe.foreachPart(registerType, stopAtStatic = true)
+ case _ =>
+ }
+
+ /** The Inlined node representing the inlined call */
+ def inlined(pt: Type) = {
+ // make sure prefix is executed if it is impure
+ if (!isIdempotentExpr(prefix)) registerType(meth.owner.thisType)
+
+ // Register types of all leaves of inlined body so that the `paramProxy` and `thisProxy` maps are defined.
+ rhs.foreachSubTree(registerLeaf)
+
+ // The class that the this-proxy `selfSym` represents
+ def classOf(selfSym: Symbol) = selfSym.info.widen.classSymbol
+
+ // The name of the outer selector that computes the rhs of `selfSym`
+ def outerSelector(selfSym: Symbol): TermName = classOf(selfSym).name.toTermName ++ nme.OUTER_SELECT
+
+ // The total nesting depth of the class represented by `selfSym`.
+ def outerLevel(selfSym: Symbol): Int = classOf(selfSym).ownersIterator.length
+
+ // All needed this-proxies, sorted by nesting depth of the classes they represent (innermost first)
+ val accessedSelfSyms = thisProxy.values.toList.map(_.symbol).sortBy(-outerLevel(_))
+
+ // Compute val-definitions for all this-proxies and append them to `bindingsBuf`
+ var lastSelf: Symbol = NoSymbol
+ for (selfSym <- accessedSelfSyms) {
+ val rhs =
+ if (!lastSelf.exists)
+ prefix
+ else
+ untpd.Select(ref(lastSelf), outerSelector(selfSym)).withType(selfSym.info)
+ bindingsBuf += ValDef(selfSym.asTerm, rhs)
+ lastSelf = selfSym
+ }
+
+ // The type map to apply to the inlined tree. This maps references to this-types
+ // and parameters to type references of their arguments or proxies.
+ val typeMap = new TypeMap {
+ def apply(t: Type) = t match {
+ case t: ThisType => thisProxy.getOrElse(t.cls, t)
+ case t: TypeRef => paramProxy.getOrElse(t, mapOver(t))
+ case t: SingletonType => paramProxy.getOrElse(t, mapOver(t))
+ case t => mapOver(t)
+ }
+ }
+
+ // The tree map to apply to the inlined tree. This maps references to this-types
+ // and parameters to references of their arguments or their proxies.
+ def treeMap(tree: Tree) = {
+ tree match {
+ case _: This =>
+ tree.tpe match {
+ case thistpe: ThisType =>
+ thisProxy.get(thistpe.cls) match {
+ case Some(t) => ref(t).withPos(tree.pos)
+ case None => tree
+ }
+ case _ => tree
+ }
+ case _: Ident =>
+ paramProxy.get(tree.tpe) match {
+ case Some(t: SingletonType) if tree.isTerm => singleton(t).withPos(tree.pos)
+ case Some(t) if tree.isType => TypeTree(t).withPos(tree.pos)
+ case None => tree
+ }
+ case _ => tree
+ }}
+
+    // The complete translation maps references to this and parameters to
+ // corresponding arguments or proxies on the type and term level. It also changes
+ // the owner from the inlined method to the current owner.
+ val inliner = new TreeTypeMap(typeMap, treeMap, meth :: Nil, ctx.owner :: Nil)
+
+ val expansion = inliner(rhs.withPos(call.pos))
+ ctx.traceIndented(i"inlining $call\n, BINDINGS =\n${bindingsBuf.toList}%\n%\nEXPANSION =\n$expansion", inlining, show = true) {
+
+ // The final expansion runs a typing pass over the inlined tree. See InlineTyper for details.
+ val expansion1 = InlineTyper.typed(expansion, pt)(inlineContext(call))
+
+ /** Does given definition bind a closure that will be inlined? */
+ def bindsDeadClosure(defn: ValOrDefDef) = Ident(defn.symbol.termRef) match {
+ case InlineableClosure(_) => !InlineTyper.retainedClosures.contains(defn.symbol)
+ case _ => false
+ }
+
+ /** All bindings in `bindingsBuf` except bindings of inlineable closures */
+ val bindings = bindingsBuf.toList.filterNot(bindsDeadClosure).map(_.withPos(call.pos))
+
+ tpd.Inlined(call, bindings, expansion1)
+ }
+ }
+
+ /** An extractor for references to closure arguments that refer to `@inline` methods */
+ private object InlineableClosure {
+ lazy val paramProxies = paramProxy.values.toSet
+ def unapply(tree: Ident)(implicit ctx: Context): Option[Tree] =
+ if (paramProxies.contains(tree.tpe)) {
+ bindingsBuf.find(_.name == tree.name) match {
+ case Some(ddef: ValDef) if ddef.symbol.is(Inline) =>
+ ddef.rhs match {
+ case closure(_, meth, _) => Some(meth)
+ case _ => None
+ }
+ case _ => None
+ }
+ } else None
+ }
+
+ /** A typer for inlined code. Its purpose is:
+ * 1. Implement constant folding over inlined code
+ * 2. Selectively expand ifs with constant conditions
+ * 3. Inline arguments that are inlineable closures
+ * 4. Make sure inlined code is type-correct.
+ * 5. Make sure that the tree's typing is idempotent (so that future -Ycheck passes succeed)
+ */
+ private object InlineTyper extends ReTyper {
+
+ var retainedClosures = Set[Symbol]()
+
+ override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context) = {
+ val tree1 = super.typedIdent(tree, pt)
+ tree1 match {
+ case InlineableClosure(_) => retainedClosures += tree.symbol
+ case _ =>
+ }
+ tree1
+ }
+
+ override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
+ val res = super.typedSelect(tree, pt)
+ ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.pos)
+ res
+ }
+
+ override def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) = {
+ val cond1 = typed(tree.cond, defn.BooleanType)
+ cond1.tpe.widenTermRefExpr match {
+ case ConstantType(Constant(condVal: Boolean)) =>
+ val selected = typed(if (condVal) tree.thenp else tree.elsep, pt)
+ if (isIdempotentExpr(cond1)) selected
+ else Block(cond1 :: Nil, selected)
+ case _ =>
+ val if1 = untpd.cpy.If(tree)(cond = untpd.TypedSplice(cond1))
+ super.typedIf(if1, pt)
+ }
+ }
+
+ override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context) = tree.asInstanceOf[tpd.Tree] match {
+ case Apply(Select(InlineableClosure(fn), nme.apply), args) =>
+ inlining.println(i"reducing $tree with closure $fn")
+ typed(fn.appliedToArgs(args), pt)
+ case _ =>
+ super.typedApply(tree, pt)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
new file mode 100644
index 000000000..148cf1da7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -0,0 +1,1061 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast._
+import Trees._, Constants._, StdNames._, Scopes._, Denotations._, Comments._
+import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._, Decorators._
+import ast.desugar, ast.desugar._
+import ProtoTypes._
+import util.Positions._
+import util.{Property, SourcePosition, DotClass}
+import collection.mutable
+import annotation.tailrec
+import ErrorReporting._
+import tpd.ListOfTreeDecorator
+import config.Config
+import config.Printers.{typr, completions, noPrinter}
+import Annotations._
+import Inferencing._
+import transform.ValueClasses._
+import TypeApplications._
+import language.implicitConversions
+import reporting.diagnostic.messages._
+
+trait NamerContextOps { this: Context =>
+
+ /** Enter symbol into current class, if current class is owner of current context,
+ * or into current scope, if not. Should always be called instead of scope.enter
+ * in order to make sure that updates to class members are reflected in
+ * finger prints.
+ */
+ def enter(sym: Symbol): Symbol = {
+ ctx.owner match {
+ case cls: ClassSymbol => cls.enter(sym)
+ case _ => this.scope.openForMutations.enter(sym)
+ }
+ sym
+ }
+
+ /** The denotation with the given name in current context */
+ def denotNamed(name: Name): Denotation =
+ if (owner.isClass)
+ if (outer.owner == owner) { // inner class scope; check whether we are referring to self
+ if (scope.size == 1) {
+ val elem = scope.lastEntry
+ if (elem.name == name) return elem.sym.denot // return self
+ }
+ assert(scope.size <= 1, scope)
+ owner.thisType.member(name)
+ }
+ else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext.
+ owner.findMember(name, owner.thisType, EmptyFlags)
+ else
+ scope.denotsNamed(name).toDenot(NoPrefix)
+
+ /** Either the current scope, or, if the current context owner is a class,
+ * the declarations of the current class.
+ */
+ def effectiveScope: Scope =
+ if (owner != null && owner.isClass) owner.asClass.unforcedDecls
+ else scope
+
+ /** The symbol (stored in some typer's symTree) of an enclosing context definition */
+ def symOfContextTree(tree: untpd.Tree) = {
+ def go(ctx: Context): Symbol = {
+ ctx.typeAssigner match {
+ case typer: Typer =>
+ tree.getAttachment(typer.SymOfTree) match {
+ case Some(sym) => sym
+ case None =>
+ var cx = ctx.outer
+ while (cx.typeAssigner eq typer) cx = cx.outer
+ go(cx)
+ }
+ case _ => NoSymbol
+ }
+ }
+ go(this)
+ }
+
+ /** Context where `sym` is defined, assuming we are in a nested context. */
+ def defContext(sym: Symbol) =
+ outersIterator
+ .dropWhile(_.owner != sym)
+ .dropWhile(_.owner == sym)
+ .next
+
+ /** The given type, unless `sym` is a constructor, in which case the
+ * type of the constructed instance is returned
+ */
+ def effectiveResultType(sym: Symbol, typeParams: List[Symbol], given: Type) =
+ if (sym.name == nme.CONSTRUCTOR) sym.owner.typeRef.appliedTo(typeParams map (_.typeRef))
+ else given
+
+ /** if isConstructor, make sure it has one non-implicit parameter list */
+ def normalizeIfConstructor(paramSymss: List[List[Symbol]], isConstructor: Boolean) =
+ if (isConstructor &&
+ (paramSymss.isEmpty || paramSymss.head.nonEmpty && (paramSymss.head.head is Implicit)))
+ Nil :: paramSymss
+ else
+ paramSymss
+
+ /** The method type corresponding to given parameters and result type */
+ def methodType(typeParams: List[Symbol], valueParamss: List[List[Symbol]], resultType: Type, isJava: Boolean = false)(implicit ctx: Context): Type = {
+ val monotpe =
+ (valueParamss :\ resultType) { (params, resultType) =>
+ val make =
+ if (params.nonEmpty && (params.head is Implicit)) ImplicitMethodType
+ else if (isJava) JavaMethodType
+ else MethodType
+ if (isJava)
+ for (param <- params)
+ if (param.info.isDirectRef(defn.ObjectClass)) param.info = defn.AnyType
+ make.fromSymbols(params, resultType)
+ }
+ if (typeParams.nonEmpty) monotpe.LambdaAbstract(typeParams)
+ else if (valueParamss.isEmpty) ExprType(monotpe)
+ else monotpe
+ }
+
+ /** Find moduleClass/sourceModule in effective scope */
+ private def findModuleBuddy(name: Name)(implicit ctx: Context) = {
+ val scope = effectiveScope
+ val it = scope.lookupAll(name).filter(_ is Module)
+ assert(it.hasNext, s"no companion $name in $scope")
+ it.next
+ }
+
+ /** Add moduleClass or sourceModule functionality to completer
+ * for a module or module class
+ */
+ def adjustModuleCompleter(completer: LazyType, name: Name) =
+ if (name.isTermName)
+ completer withModuleClass (_ => findModuleBuddy(name.moduleClassName))
+ else
+ completer withSourceModule (_ => findModuleBuddy(name.sourceModuleName))
+}
+
+/** This class creates symbols from definitions and imports and gives them
+ * lazy types.
+ *
+ * Timeline:
+ *
+ * During enter, trees are expanded as necessary, populating the expandedTree map.
+ * Symbols are created, and the symOfTree map is set up.
+ *
+ * Symbol completion causes some trees to be already typechecked and typedTree
+ * entries are created to associate the typed trees with the untyped expanded originals.
+ *
+ * During typer, original trees are first expanded using expandedTree. For each
+ * expanded member definition or import we extract and remove the corresponding symbol
+ * from the symOfTree map and complete it. We then consult the typedTree map to see
+ * whether a typed tree exists already. If yes, the typed tree is returned as result.
+ * Otherwise, we proceed with regular type checking.
+ *
+ * The scheme is designed to allow sharing of nodes, as long as each duplicate appears
+ * in a different method.
+ */
+class Namer { typer: Typer =>
+
+ import untpd._
+
+ val TypedAhead = new Property.Key[tpd.Tree]
+ val ExpandedTree = new Property.Key[Tree]
+ val SymOfTree = new Property.Key[Symbol]
+
+  /** A partial map from unexpanded member and pattern defs to their expansions.
+ * Populated during enterSyms, emptied during typer.
+ */
+ //lazy val expandedTree = new mutable.AnyRefMap[DefTree, Tree]
+ /*{
+ override def default(tree: DefTree) = tree // can't have defaults on AnyRefMaps :-(
+ }*/
+
+ /** A map from expanded MemberDef, PatDef or Import trees to their symbols.
+ * Populated during enterSyms, emptied at the point a typed tree
+ * with the same symbol is created (this can be when the symbol is completed
+   *  or at the latest when the tree is typechecked).
+ */
+ //lazy val symOfTree = new mutable.AnyRefMap[Tree, Symbol]
+
+ /** A map from expanded trees to their typed versions.
+ * Populated when trees are typechecked during completion (using method typedAhead).
+ */
+ // lazy val typedTree = new mutable.AnyRefMap[Tree, tpd.Tree]
+
+ /** A map from method symbols to nested typers.
+ * Populated when methods are completed. Emptied when they are typechecked.
+ * The nested typer contains new versions of the four maps above including this
+ * one, so that trees that are shared between different DefDefs can be independently
+ * used as indices. It also contains a scope that contains nested parameters.
+ */
+ lazy val nestedTyper = new mutable.AnyRefMap[Symbol, Typer]
+
+ /** The scope of the typer.
+ * For nested typers this is a place parameters are entered during completion
+ * and where they survive until typechecking. A context with this typer also
+ * has this scope.
+ */
+ val scope = newScope
+
+ /** The symbol of the given expanded tree. */
+ def symbolOfTree(tree: Tree)(implicit ctx: Context): Symbol = {
+ val xtree = expanded(tree)
+ xtree.getAttachment(TypedAhead) match {
+ case Some(ttree) => ttree.symbol
+ case none => xtree.attachment(SymOfTree)
+ }
+ }
+
+ /** The enclosing class with given name; error if none exists */
+ def enclosingClassNamed(name: TypeName, pos: Position)(implicit ctx: Context): Symbol = {
+ if (name.isEmpty) NoSymbol
+ else {
+ val cls = ctx.owner.enclosingClassNamed(name)
+ if (!cls.exists) ctx.error(s"no enclosing class or object is named $name", pos)
+ cls
+ }
+ }
+
+ /** Record `sym` as the symbol defined by `tree` */
+ def recordSym(sym: Symbol, tree: Tree)(implicit ctx: Context): Symbol = {
+ val refs = tree.attachmentOrElse(References, Nil)
+ if (refs.nonEmpty) {
+ tree.removeAttachment(References)
+ refs foreach (_.pushAttachment(OriginalSymbol, sym))
+ }
+ tree.pushAttachment(SymOfTree, sym)
+ sym
+ }
+
+ /** If this tree is a member def or an import, create a symbol of it
+ * and store in symOfTree map.
+ */
+ def createSymbol(tree: Tree)(implicit ctx: Context): Symbol = {
+
+ def privateWithinClass(mods: Modifiers) =
+ enclosingClassNamed(mods.privateWithin, mods.pos)
+
+ def checkFlags(flags: FlagSet) =
+ if (flags.isEmpty) flags
+ else {
+ val (ok, adapted, kind) = tree match {
+ case tree: TypeDef => (flags.isTypeFlags, flags.toTypeFlags, "type")
+ case _ => (flags.isTermFlags, flags.toTermFlags, "value")
+ }
+ if (!ok)
+ ctx.error(i"modifier(s) `$flags' incompatible with $kind definition", tree.pos)
+ adapted
+ }
+
+ /** Add moduleClass/sourceModule to completer if it is for a module val or class */
+ def adjustIfModule(completer: LazyType, tree: MemberDef) =
+ if (tree.mods is Module) ctx.adjustModuleCompleter(completer, tree.name.encode)
+ else completer
+
+ typr.println(i"creating symbol for $tree in ${ctx.mode}")
+
+ def checkNoConflict(name: Name): Name = {
+ def errorName(msg: => String) = {
+ ctx.error(msg, tree.pos)
+ name.freshened
+ }
+ def preExisting = ctx.effectiveScope.lookup(name)
+ if (ctx.owner is PackageClass)
+ if (preExisting.isDefinedInCurrentRun)
+ errorName(s"${preExisting.showLocated} has already been compiled\nonce during this run")
+ else name
+ else
+ if ((!ctx.owner.isClass || name.isTypeName) && preExisting.exists)
+ errorName(i"$name is already defined as $preExisting")
+ else name
+ }
+
+ val inSuperCall = if (ctx.mode is Mode.InSuperCall) InSuperCall else EmptyFlags
+
+ tree match {
+ case tree: TypeDef if tree.isClassDef =>
+ val name = checkNoConflict(tree.name.encode).asTypeName
+ val flags = checkFlags(tree.mods.flags &~ Implicit)
+ val cls = recordSym(ctx.newClassSymbol(
+ ctx.owner, name, flags | inSuperCall,
+ cls => adjustIfModule(new ClassCompleter(cls, tree)(ctx), tree),
+ privateWithinClass(tree.mods), tree.namePos, ctx.source.file), tree)
+ cls.completer.asInstanceOf[ClassCompleter].init()
+ cls
+ case tree: MemberDef =>
+ val name = checkNoConflict(tree.name.encode)
+ val flags = checkFlags(tree.mods.flags)
+ val isDeferred = lacksDefinition(tree)
+ val deferred = if (isDeferred) Deferred else EmptyFlags
+ val method = if (tree.isInstanceOf[DefDef]) Method else EmptyFlags
+ val inSuperCall1 = if (tree.mods is ParamOrAccessor) EmptyFlags else inSuperCall
+ // suppress inSuperCall for constructor parameters
+ val higherKinded = tree match {
+ case TypeDef(_, PolyTypeTree(_, _)) if isDeferred => HigherKinded
+ case _ => EmptyFlags
+ }
+
+ // to complete a constructor, move one context further out -- this
+ // is the context enclosing the class. Note that the context in which a
+ // constructor is recorded and the context in which it is completed are
+ // different: The former must have the class as owner (because the
+ // constructor is owned by the class), the latter must not (because
+ // constructor parameters are interpreted as if they are outside the class).
+ // Don't do this for Java constructors because they need to see the import
+ // of the companion object, and it is not necessary for them because they
+ // have no implementation.
+ val cctx = if (tree.name == nme.CONSTRUCTOR && !(tree.mods is JavaDefined)) ctx.outer else ctx
+
+ val completer = tree match {
+ case tree: TypeDef => new TypeDefCompleter(tree)(cctx)
+ case _ => new Completer(tree)(cctx)
+ }
+
+ recordSym(ctx.newSymbol(
+ ctx.owner, name, flags | deferred | method | higherKinded | inSuperCall1,
+ adjustIfModule(completer, tree),
+ privateWithinClass(tree.mods), tree.namePos), tree)
+ case tree: Import =>
+ recordSym(ctx.newSymbol(
+ ctx.owner, nme.IMPORT, Synthetic, new Completer(tree), NoSymbol, tree.pos), tree)
+ case _ =>
+ NoSymbol
+ }
+ }
+
+ /** If `sym` exists, enter it in effective scope. Check that
+ * package members are not entered twice in the same run.
+ */
+ def enterSymbol(sym: Symbol)(implicit ctx: Context) = {
+ if (sym.exists) {
+ typr.println(s"entered: $sym in ${ctx.owner} and ${ctx.effectiveScope}")
+ ctx.enter(sym)
+ }
+ sym
+ }
+
+ /** Create package if it does not yet exist. */
+ private def createPackageSymbol(pid: RefTree)(implicit ctx: Context): Symbol = {
+ val pkgOwner = pid match {
+ case Ident(_) => if (ctx.owner eq defn.EmptyPackageClass) defn.RootClass else ctx.owner
+ case Select(qual: RefTree, _) => createPackageSymbol(qual).moduleClass
+ }
+ val existing = pkgOwner.info.decls.lookup(pid.name)
+
+ if ((existing is Package) && (pkgOwner eq existing.owner)) existing
+ else {
+ /** If there's already an existing type, then the package is a dup of this type */
+ val existingType = pkgOwner.info.decls.lookup(pid.name.toTypeName)
+ if (existingType.exists) {
+ ctx.error(PkgDuplicateSymbol(existingType), pid.pos)
+ ctx.newCompletePackageSymbol(pkgOwner, (pid.name ++ "$_error_").toTermName).entered
+ }
+ else ctx.newCompletePackageSymbol(pkgOwner, pid.name.asTermName).entered
+ }
+ }
+
+ /** Expand tree and store in `expandedTree` */
+ def expand(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case mdef: DefTree =>
+ val expanded = desugar.defTree(mdef)
+ typr.println(i"Expansion: $mdef expands to $expanded")
+ if (expanded ne mdef) mdef.pushAttachment(ExpandedTree, expanded)
+ case _ =>
+ }
+
+ /** The expanded version of this tree, or tree itself if not expanded */
+ def expanded(tree: Tree)(implicit ctx: Context): Tree = tree match {
+ case ddef: DefTree => ddef.attachmentOrElse(ExpandedTree, ddef)
+ case _ => tree
+ }
+
+ /** A new context that summarizes an import statement */
+ def importContext(sym: Symbol, selectors: List[Tree])(implicit ctx: Context) =
+ ctx.fresh.setImportInfo(new ImportInfo(sym, selectors))
+
+ /** A new context for the interior of a class */
+ def inClassContext(selfInfo: DotClass /* Should be Type | Symbol*/)(implicit ctx: Context): Context = {
+ val localCtx: Context = ctx.fresh.setNewScope
+ selfInfo match {
+ case sym: Symbol if sym.exists && sym.name != nme.WILDCARD =>
+ localCtx.scope.openForMutations.enter(sym)
+ case _ =>
+ }
+ localCtx
+ }
+
+  /** For all class definitions `stat` in `xstats`: If the companion class is
+   *  not also defined in `xstats`, invalidate it by setting its info to
+ * NoType.
+ */
+ def invalidateCompanions(pkg: Symbol, xstats: List[untpd.Tree])(implicit ctx: Context): Unit = {
+ val definedNames = xstats collect { case stat: NameTree => stat.name }
+ def invalidate(name: TypeName) =
+ if (!(definedNames contains name)) {
+ val member = pkg.info.decl(name).asSymDenotation
+ if (member.isClass && !(member is Package)) member.info = NoType
+ }
+ xstats foreach {
+ case stat: TypeDef if stat.isClassDef =>
+ invalidate(stat.name.moduleClassName)
+ case _ =>
+ }
+ }
+
+ /** Expand tree and create top-level symbols for statement and enter them into symbol table */
+ def index(stat: Tree)(implicit ctx: Context): Context = {
+ expand(stat)
+ indexExpanded(stat)
+ }
+
+ /** Create top-level symbols for all statements in the expansion of this statement and
+ * enter them into symbol table
+ */
+ def indexExpanded(origStat: Tree)(implicit ctx: Context): Context = {
+ def recur(stat: Tree): Context = stat match {
+ case pcl: PackageDef =>
+ val pkg = createPackageSymbol(pcl.pid)
+ index(pcl.stats)(ctx.fresh.setOwner(pkg.moduleClass))
+ invalidateCompanions(pkg, Trees.flatten(pcl.stats map expanded))
+ setDocstring(pkg, stat)
+ ctx
+ case imp: Import =>
+ importContext(createSymbol(imp), imp.selectors)
+ case mdef: DefTree =>
+ val sym = enterSymbol(createSymbol(mdef))
+ setDocstring(sym, origStat)
+ addEnumConstants(mdef, sym)
+ ctx
+ case stats: Thicket =>
+ stats.toList.foreach(recur)
+ ctx
+ case _ =>
+ ctx
+ }
+ recur(expanded(origStat))
+ }
+
+ /** Determines whether this field holds an enum constant.
+ * To qualify, the following conditions must be met:
+ * - The field's class has the ENUM flag set
+ * - The field's class extends java.lang.Enum
+ * - The field has the ENUM flag set
+ * - The field is static
+ * - The field is stable
+ */
+ def isEnumConstant(vd: ValDef)(implicit ctx: Context) = {
+ // val ownerHasEnumFlag =
+ // Necessary to check because scalac puts Java's static members into the companion object
+ // while Scala's enum constants live directly in the class.
+    // We don't check for clazz.superClass == JavaEnumClass, because this causes an illegal
+ // cyclic reference error. See the commit message for details.
+ // if (ctx.compilationUnit.isJava) ctx.owner.companionClass.is(Enum) else ctx.owner.is(Enum)
+ vd.mods.is(allOf(Enum, Stable, JavaStatic, JavaDefined)) // && ownerHasEnumFlag
+ }
+
+ /** Add java enum constants */
+ def addEnumConstants(mdef: DefTree, sym: Symbol)(implicit ctx: Context): Unit = mdef match {
+ case vdef: ValDef if (isEnumConstant(vdef)) =>
+ val enumClass = sym.owner.linkedClass
+ if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
+ enumClass.addAnnotation(Annotation.makeChild(sym))
+ case _ =>
+ }
+
+
+ def setDocstring(sym: Symbol, tree: Tree)(implicit ctx: Context) = tree match {
+ case t: MemberDef if t.rawComment.isDefined =>
+ ctx.docCtx.foreach(_.addDocstring(sym, t.rawComment))
+ case _ => ()
+ }
+
+ /** Create top-level symbols for statements and enter them into symbol table */
+ def index(stats: List[Tree])(implicit ctx: Context): Context = {
+
+ val classDef = mutable.Map[TypeName, TypeDef]()
+ val moduleDef = mutable.Map[TypeName, TypeDef]()
+
+ /** Merge the definitions of a synthetic companion generated by a case class
+ * and the real companion, if both exist.
+ */
+ def mergeCompanionDefs() = {
+ for (cdef @ TypeDef(name, _) <- stats)
+ if (cdef.isClassDef) {
+ classDef(name) = cdef
+ cdef.attachmentOrElse(ExpandedTree, cdef) match {
+ case Thicket(cls :: mval :: (mcls @ TypeDef(_, _: Template)) :: crest) =>
+ moduleDef(name) = mcls
+ case _ =>
+ }
+ }
+ for (mdef @ ModuleDef(name, _) <- stats if !mdef.mods.is(Flags.Package)) {
+ val typName = name.toTypeName
+ val Thicket(vdef :: (mcls @ TypeDef(_, impl: Template)) :: Nil) = mdef.attachment(ExpandedTree)
+ moduleDef(typName) = mcls
+ classDef get name.toTypeName match {
+ case Some(cdef) =>
+ cdef.attachmentOrElse(ExpandedTree, cdef) match {
+ case Thicket(cls :: mval :: TypeDef(_, compimpl: Template) :: crest) =>
+ val mcls1 = cpy.TypeDef(mcls)(
+ rhs = cpy.Template(impl)(body = compimpl.body ++ impl.body))
+ mdef.putAttachment(ExpandedTree, Thicket(vdef :: mcls1 :: Nil))
+ moduleDef(typName) = mcls1
+ cdef.putAttachment(ExpandedTree, Thicket(cls :: crest))
+ case _ =>
+ }
+ case none =>
+ }
+ }
+ }
+
+ def createLinks(classTree: TypeDef, moduleTree: TypeDef)(implicit ctx: Context) = {
+ val claz = ctx.denotNamed(classTree.name.encode).symbol
+ val modl = ctx.denotNamed(moduleTree.name.encode).symbol
+ ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, claz, modl).entered
+ ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, modl, claz).entered
+ }
+
+ def createCompanionLinks(implicit ctx: Context): Unit = {
+ for (cdef @ TypeDef(name, _) <- classDef.values) {
+ moduleDef.getOrElse(name, EmptyTree) match {
+ case t: TypeDef =>
+ createLinks(cdef, t)
+ case EmptyTree =>
+ }
+ }
+ }
+
+ stats foreach expand
+ mergeCompanionDefs()
+ val ctxWithStats = (ctx /: stats) ((ctx, stat) => indexExpanded(stat)(ctx))
+ createCompanionLinks(ctxWithStats)
+ ctxWithStats
+ }
+
+ /** The completer of a symbol defined by a member def or import (except ClassSymbols) */
+ class Completer(val original: Tree)(implicit ctx: Context) extends LazyType {
+
+ protected def localContext(owner: Symbol) = ctx.fresh.setOwner(owner).setTree(original)
+
+ protected def typeSig(sym: Symbol): Type = original match {
+ case original: ValDef =>
+ if (sym is Module) moduleValSig(sym)
+ else valOrDefDefSig(original, sym, Nil, Nil, identity)(localContext(sym).setNewScope)
+ case original: DefDef =>
+ val typer1 = ctx.typer.newLikeThis
+ nestedTyper(sym) = typer1
+ typer1.defDefSig(original, sym)(localContext(sym).setTyper(typer1))
+ case imp: Import =>
+ try {
+ val expr1 = typedAheadExpr(imp.expr, AnySelectionProto)
+ ImportType(expr1)
+ } catch {
+ case ex: CyclicReference =>
+ typr.println(s"error while completing ${imp.expr}")
+ throw ex
+ }
+ }
+
+ final override def complete(denot: SymDenotation)(implicit ctx: Context) = {
+ if (completions != noPrinter && ctx.typerState != this.ctx.typerState) {
+ completions.println(completions.getClass.toString)
+ def levels(c: Context): Int =
+ if (c.typerState eq this.ctx.typerState) 0
+ else if (c.typerState == null) -1
+ else if (c.outer.typerState == c.typerState) levels(c.outer)
+ else levels(c.outer) + 1
+ completions.println(s"!!!completing ${denot.symbol.showLocated} in buried typerState, gap = ${levels(ctx)}")
+ }
+ completeInCreationContext(denot)
+ }
+
+ protected def addAnnotations(denot: SymDenotation): Unit = original match {
+ case original: untpd.MemberDef =>
+ var hasInlineAnnot = false
+ for (annotTree <- untpd.modsDeco(original).mods.annotations) {
+ val cls = typedAheadAnnotation(annotTree)
+ val ann = Annotation.deferred(cls, implicit ctx => typedAnnotation(annotTree))
+ denot.addAnnotation(ann)
+ if (cls == defn.InlineAnnot && denot.is(Method, butNot = Accessor))
+ denot.setFlag(Inline)
+ }
+ case _ =>
+ }
+
+ private def addInlineInfo(denot: SymDenotation) = original match {
+ case original: untpd.DefDef if denot.isInlineMethod =>
+ Inliner.registerInlineInfo(
+ denot,
+ implicit ctx => typedAheadExpr(original).asInstanceOf[tpd.DefDef].rhs
+ )(localContext(denot.symbol))
+ case _ =>
+ }
+
+ /** Intentionally left without `implicit ctx` parameter. We need
+ * to pick up the context at the point where the completer was created.
+ */
+ def completeInCreationContext(denot: SymDenotation): Unit = {
+ addAnnotations(denot)
+ addInlineInfo(denot)
+ denot.info = typeSig(denot.symbol)
+ Checking.checkWellFormed(denot.symbol)
+ }
+ }
+
+ class TypeDefCompleter(original: TypeDef)(ictx: Context) extends Completer(original)(ictx) with TypeParamsCompleter {
+ private var myTypeParams: List[TypeSymbol] = null
+ private var nestedCtx: Context = null
+ assert(!original.isClassDef)
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] = {
+ if (myTypeParams == null) {
+ //println(i"completing type params of $sym in ${sym.owner}")
+ nestedCtx = localContext(sym).setNewScope
+ myTypeParams = {
+ implicit val ctx: Context = nestedCtx
+ val tparams = original.rhs match {
+ case PolyTypeTree(tparams, _) => tparams
+ case _ => Nil
+ }
+ completeParams(tparams)
+ tparams.map(symbolOfTree(_).asType)
+ }
+ }
+ myTypeParams
+ }
+
+ override protected def typeSig(sym: Symbol): Type =
+ typeDefSig(original, sym, completerTypeParams(sym)(ictx))(nestedCtx)
+ }
+
+ class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) {
+ withDecls(newScope)
+
+ protected implicit val ctx: Context = localContext(cls).setMode(ictx.mode &~ Mode.InSuperCall)
+
+ val TypeDef(name, impl @ Template(constr, parents, self, _)) = original
+
+ val (params, rest) = impl.body span {
+ case td: TypeDef => td.mods is Param
+ case vd: ValDef => vd.mods is ParamAccessor
+ case _ => false
+ }
+
+ def init() = index(params)
+
+ /** The type signature of a ClassDef with given symbol */
+ override def completeInCreationContext(denot: SymDenotation): Unit = {
+
+ /* The type of a parent constructor. Types constructor arguments
+ * only if parent type contains uninstantiated type parameters.
+ */
+ def parentType(parent: untpd.Tree)(implicit ctx: Context): Type =
+ if (parent.isType) {
+ typedAheadType(parent, AnyTypeConstructorProto).tpe
+ } else {
+ val (core, targs) = stripApply(parent) match {
+ case TypeApply(core, targs) => (core, targs)
+ case core => (core, Nil)
+ }
+ val Select(New(tpt), nme.CONSTRUCTOR) = core
+ val targs1 = targs map (typedAheadType(_))
+ val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes
+ if (ptype.typeParams.isEmpty) ptype
+ else typedAheadExpr(parent).tpe
+ }
+
+ /* Check parent type tree `parent` for the following well-formedness conditions:
+ * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix)
+       *  (2) It may not derive from itself
+ * (3) Overriding type parameters must be correctly forwarded. (@see checkTypeParamOverride)
+ * (4) The class is not final
+ * (5) If the class is sealed, it is defined in the same compilation unit as the current class
+ */
+ def checkedParentType(parent: untpd.Tree, paramAccessors: List[Symbol]): Type = {
+ val ptype = parentType(parent)(ctx.superCallContext)
+ if (cls.isRefinementClass) ptype
+ else {
+ val pt = checkClassType(ptype, parent.pos,
+ traitReq = parent ne parents.head, stablePrefixReq = true)
+ if (pt.derivesFrom(cls)) {
+ val addendum = parent match {
+ case Select(qual: Super, _) if ctx.scala2Mode =>
+ "\n(Note that inheriting a class of the same name is no longer allowed)"
+ case _ => ""
+ }
+ ctx.error(i"cyclic inheritance: $cls extends itself$addendum", parent.pos)
+ defn.ObjectType
+ }
+ else if (!paramAccessors.forall(checkTypeParamOverride(pt, _)))
+ defn.ObjectType
+ else {
+ val pclazz = pt.typeSymbol
+ if (pclazz.is(Final))
+ ctx.error(em"cannot extend final $pclazz", cls.pos)
+ if (pclazz.is(Sealed) && pclazz.associatedFile != cls.associatedFile)
+ ctx.error(em"cannot extend sealed $pclazz in different compilation unit", cls.pos)
+ pt
+ }
+ }
+ }
+
+ /* Check that every parameter with the same name as a visible named parameter in the parent
+ * class satisfies the following two conditions:
+ * (1) The overriding parameter is also named (i.e. not local/name mangled).
+ * (2) The overriding parameter is passed on directly to the parent parameter, or the
+ * parent parameter is not fully defined.
+ * @return true if conditions are satisfied, false otherwise.
+ */
+ def checkTypeParamOverride(parent: Type, paramAccessor: Symbol): Boolean = {
+ var ok = true
+ val pname = paramAccessor.name
+
+ def illegal(how: String): Unit = {
+ ctx.error(em"Illegal override of public type parameter $pname in $parent$how", paramAccessor.pos)
+ ok = false
+ }
+
+ def checkAlias(tp: Type): Unit = tp match {
+ case tp: RefinedType =>
+ if (tp.refinedName == pname)
+ tp.refinedInfo match {
+ case TypeAlias(alias) =>
+ alias match {
+ case TypeRef(pre, name1) if name1 == pname && (pre =:= cls.thisType) =>
+ // OK, parameter is passed on directly
+ case _ =>
+ illegal(em".\nParameter is both redeclared and instantiated with $alias.")
+ }
+ case _ => // OK, argument is not fully defined
+ }
+ else checkAlias(tp.parent)
+ case _ =>
+ }
+ if (parent.nonPrivateMember(paramAccessor.name).symbol.is(Param))
+ if (paramAccessor is Private)
+ illegal("\nwith private parameter. Parameter definition needs to be prefixed with `type'.")
+ else
+ checkAlias(parent)
+ ok
+ }
+
+ addAnnotations(denot)
+
+ val selfInfo =
+ if (self.isEmpty) NoType
+ else if (cls.is(Module)) {
+ val moduleType = cls.owner.thisType select sourceModule
+ if (self.name == nme.WILDCARD) moduleType
+ else recordSym(
+ ctx.newSymbol(cls, self.name, self.mods.flags, moduleType, coord = self.pos),
+ self)
+ }
+ else createSymbol(self)
+
+ // pre-set info, so that parent types can refer to type params
+ val tempInfo = new TempClassInfo(cls.owner.thisType, cls, decls, selfInfo)
+ denot.info = tempInfo
+
+ // Ensure constructor is completed so that any parameter accessors
+ // which have type trees deriving from its parameters can be
+ // completed in turn. Note that parent types access such parameter
+ // accessors, that's why the constructor needs to be completed before
+ // the parent types are elaborated.
+ index(constr)
+ symbolOfTree(constr).ensureCompleted()
+
+ index(rest)(inClassContext(selfInfo))
+
+ val tparamAccessors = decls.filter(_ is TypeParamAccessor).toList
+ val parentTypes = ensureFirstIsClass(parents.map(checkedParentType(_, tparamAccessors)))
+ val parentRefs = ctx.normalizeToClassRefs(parentTypes, cls, decls)
+ typr.println(s"completing $denot, parents = $parents, parentTypes = $parentTypes, parentRefs = $parentRefs")
+
+ tempInfo.finalize(denot, parentRefs)
+
+ Checking.checkWellFormed(cls)
+ if (isDerivedValueClass(cls)) cls.setFlag(Final)
+ cls.setApplicableFlags(
+ (NoInitsInterface /: impl.body)((fs, stat) => fs & defKind(stat)))
+ }
+ }
+
+ /** Typecheck tree during completion, and remember result in typedtree map */
+ private def typedAheadImpl(tree: Tree, pt: Type)(implicit ctx: Context): tpd.Tree = {
+ val xtree = expanded(tree)
+ xtree.getAttachment(TypedAhead) match {
+ case Some(ttree) => ttree
+ case none =>
+ val ttree = typer.typed(tree, pt)
+ xtree.pushAttachment(TypedAhead, ttree)
+ ttree
+ }
+ }
+
+ def typedAheadType(tree: Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree =
+ typedAheadImpl(tree, pt)(ctx retractMode Mode.PatternOrType addMode Mode.Type)
+
+ def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree =
+ typedAheadImpl(tree, pt)(ctx retractMode Mode.PatternOrType)
+
+ def typedAheadAnnotation(tree: Tree)(implicit ctx: Context): Symbol = tree match {
+ case Apply(fn, _) => typedAheadAnnotation(fn)
+ case TypeApply(fn, _) => typedAheadAnnotation(fn)
+ case Select(qual, nme.CONSTRUCTOR) => typedAheadAnnotation(qual)
+ case New(tpt) => typedAheadType(tpt).tpe.classSymbol
+ }
+
+ /** Enter and typecheck parameter list */
+ def completeParams(params: List[MemberDef])(implicit ctx: Context) = {
+ index(params)
+ for (param <- params) typedAheadExpr(param)
+ }
+
+ /** The signature of a module valdef.
+ * This will compute the corresponding module class TypeRef immediately
+ * without going through the defined type of the ValDef. This is necessary
+ * to avoid cyclic references involving imports and module val defs.
+ */
+ def moduleValSig(sym: Symbol)(implicit ctx: Context): Type = {
+ val clsName = sym.name.moduleClassName
+ val cls = ctx.denotNamed(clsName) suchThat (_ is ModuleClass)
+ ctx.owner.thisType select (clsName, cls)
+ }
+
+ /** The type signature of a ValDef or DefDef
+ * @param mdef The definition
+ * @param sym Its symbol
+ * @param paramFn A wrapping function that produces the type of the
+ * defined symbol, given its final return type
+ */
+ def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, typeParams: List[Symbol], paramss: List[List[Symbol]], paramFn: Type => Type)(implicit ctx: Context): Type = {
+
+ def inferredType = {
+ /** A type for this definition that might be inherited from elsewhere:
+ * If this is a setter parameter, the corresponding getter type.
+ * If this is a class member, the conjunction of all result types
+ * of overridden methods.
+ * NoType if neither case holds.
+ */
+ val inherited =
+ if (sym.owner.isTerm) NoType
+ else {
+ // TODO: Look only at member of supertype instead?
+ lazy val schema = paramFn(WildcardType)
+ val site = sym.owner.thisType
+ ((NoType: Type) /: sym.owner.info.baseClasses.tail) { (tp, cls) =>
+ def instantiatedResType(info: Type, tparams: List[Symbol], paramss: List[List[Symbol]]): Type = info match {
+ case info: PolyType =>
+ if (info.paramNames.length == typeParams.length)
+ instantiatedResType(info.instantiate(tparams.map(_.typeRef)), Nil, paramss)
+ else NoType
+ case info: MethodType =>
+ paramss match {
+ case params :: paramss1 if info.paramNames.length == params.length =>
+ instantiatedResType(info.instantiate(params.map(_.termRef)), tparams, paramss1)
+ case _ =>
+ NoType
+ }
+ case _ =>
+ if (tparams.isEmpty && paramss.isEmpty) info.widenExpr
+ else NoType
+ }
+ val iRawInfo =
+ cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema).info
+ val iResType = instantiatedResType(iRawInfo, typeParams, paramss).asSeenFrom(site, cls)
+ if (iResType.exists)
+ typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inherited: $iResType")
+ tp & iResType
+ }
+ }
+
+ /** The proto-type to be used when inferring the result type from
+ * the right hand side. This is `WildcardType` except if the definition
+ * is a default getter. In that case, the proto-type is the type of
+ * the corresponding parameter where bound parameters are replaced by
+ * Wildcards.
+ */
+ def rhsProto = {
+ val name = sym.asTerm.name
+ val idx = name.defaultGetterIndex
+ if (idx < 0) WildcardType
+ else {
+ val original = name.defaultGetterToMethod
+ val meth: Denotation =
+ if (original.isConstructorName && (sym.owner is ModuleClass))
+ sym.owner.companionClass.info.decl(nme.CONSTRUCTOR)
+ else
+ ctx.defContext(sym).denotNamed(original)
+ def paramProto(paramss: List[List[Type]], idx: Int): Type = paramss match {
+ case params :: paramss1 =>
+ if (idx < params.length) wildApprox(params(idx))
+ else paramProto(paramss1, idx - params.length)
+ case nil =>
+ WildcardType
+ }
+ val defaultAlts = meth.altsWith(_.hasDefaultParams)
+ if (defaultAlts.length == 1)
+ paramProto(defaultAlts.head.info.widen.paramTypess, idx)
+ else
+ WildcardType
+ }
+ }
+
+ // println(s"final inherited for $sym: ${inherited.toString}") !!!
+ // println(s"owner = ${sym.owner}, decls = ${sym.owner.info.decls.show}")
+ def isInline = sym.is(FinalOrInline, butNot = Method | Mutable)
+
+ // Widen rhs type and approximate `|' but keep ConstantTypes if
+ // definition is inline (i.e. final in Scala2).
+ def widenRhs(tp: Type): Type = tp.widenTermRefExpr match {
+ case tp: ConstantType if isInline => tp
+ case _ => ctx.harmonizeUnion(tp.widen)
+ }
+
+ // Replace aliases to Unit by Unit itself. If we leave the alias in
+ // it would be erased to BoxedUnit.
+ def dealiasIfUnit(tp: Type) = if (tp.isRef(defn.UnitClass)) defn.UnitType else tp
+
+ val rhsCtx = ctx.addMode(Mode.InferringReturnType)
+ def rhsType = typedAheadExpr(mdef.rhs, inherited orElse rhsProto)(rhsCtx).tpe
+ def cookedRhsType = ctx.deskolemize(dealiasIfUnit(widenRhs(rhsType)))
+ lazy val lhsType = fullyDefinedType(cookedRhsType, "right-hand side", mdef.pos)
+ //if (sym.name.toString == "y") println(i"rhs = $rhsType, cooked = $cookedRhsType")
+ if (inherited.exists)
+ if (sym.is(Final, butNot = Method) && lhsType.isInstanceOf[ConstantType])
+ lhsType // keep constant types that fill in for a non-constant (to be revised when inline has landed).
+ else inherited
+ else {
+ if (sym is Implicit) {
+ val resStr = if (mdef.isInstanceOf[DefDef]) "result " else ""
+ ctx.error(s"${resStr}type of implicit definition needs to be given explicitly", mdef.pos)
+ sym.resetFlag(Implicit)
+ }
+ lhsType orElse WildcardType
+ }
+ }
+
+ val tptProto = mdef.tpt match {
+ case _: untpd.DerivedTypeTree =>
+ WildcardType
+ case TypeTree() =>
+ inferredType
+ case TypedSplice(tpt: TypeTree) if !isFullyDefined(tpt.tpe, ForceDegree.none) =>
+ val rhsType = typedAheadExpr(mdef.rhs, tpt.tpe).tpe
+ mdef match {
+ case mdef: DefDef if mdef.name == nme.ANON_FUN =>
+ val hygienicType = avoid(rhsType, paramss.flatten)
+ if (!(hygienicType <:< tpt.tpe))
+ ctx.error(i"return type ${tpt.tpe} of lambda cannot be made hygienic;\n" +
+ i"it is not a supertype of the hygienic type $hygienicType", mdef.pos)
+ //println(i"lifting $rhsType over $paramss -> $hygienicType = ${tpt.tpe}")
+ //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe })
+ case _ =>
+ }
+ WildcardType
+ case _ =>
+ WildcardType
+ }
+ paramFn(typedAheadType(mdef.tpt, tptProto).tpe)
+ }
+
+ /** The type signature of a DefDef with given symbol */
+ def defDefSig(ddef: DefDef, sym: Symbol)(implicit ctx: Context) = {
+ val DefDef(name, tparams, vparamss, _, _) = ddef
+ val isConstructor = name == nme.CONSTRUCTOR
+
+ // The following 3 lines replace what was previously just completeParams(tparams).
+ // But that can cause bad bounds being computed, as witnessed by
+ // tests/pos/paramcycle.scala. The problematic sequence is this:
+ // 0. Class constructor gets completed.
+ // 1. Type parameter CP of constructor gets completed
+ // 2. As a first step CP's bounds are set to Nothing..Any.
+ // 3. CP's real type bound demands the completion of corresponding type parameter DP
+ // of enclosing class.
+ // 4. Type parameter DP has a rhs a DerivedFromParam tree, as installed by
+ // desugar.classDef
+ // 5. The completion of DP then copies the current bounds of CP, which are still Nothing..Any.
+ // 6. The completion of CP finishes installing the real type bounds.
+ // Consequence: CP ends up with the wrong bounds!
+ // To avoid this we always complete type parameters of a class before the type parameters
+ // of the class constructor, but after having indexed the constructor parameters (because
+ // indexing is needed to provide a symbol to copy for DP's completion).
+ // With the patch, we get instead the following sequence:
+ // 0. Class constructor gets completed.
+ // 1. Class constructor parameter CP is indexed.
+ // 2. Class parameter DP starts completion.
+ // 3. Info of CP is computed (to be copied to DP).
+ // 4. CP is completed.
+ // 5. Info of CP is copied to DP and DP is completed.
+ index(tparams)
+ if (isConstructor) sym.owner.typeParams.foreach(_.ensureCompleted())
+ for (tparam <- tparams) typedAheadExpr(tparam)
+
+ vparamss foreach completeParams
+ def typeParams = tparams map symbolOfTree
+ val paramSymss = ctx.normalizeIfConstructor(vparamss.nestedMap(symbolOfTree), isConstructor)
+ def wrapMethType(restpe: Type): Type = {
+ val restpe1 = // try to make anonymous functions non-dependent, so that they can be used in closures
+ if (name == nme.ANON_FUN) avoid(restpe, paramSymss.flatten)
+ else restpe
+ ctx.methodType(tparams map symbolOfTree, paramSymss, restpe1, isJava = ddef.mods is JavaDefined)
+ }
+ if (isConstructor) {
+ // set result type tree to unit, but take the current class as result type of the symbol
+ typedAheadType(ddef.tpt, defn.UnitType)
+ wrapMethType(ctx.effectiveResultType(sym, typeParams, NoType))
+ }
+ else valOrDefDefSig(ddef, sym, typeParams, paramSymss, wrapMethType)
+ }
+
+ def typeDefSig(tdef: TypeDef, sym: Symbol, tparamSyms: List[TypeSymbol])(implicit ctx: Context): Type = {
+ def abstracted(tp: Type): Type =
+ if (tparamSyms.nonEmpty) tp.LambdaAbstract(tparamSyms) else tp
+
+ val dummyInfo = abstracted(TypeBounds.empty)
+ sym.info = dummyInfo
+ // Temporarily set info of defined type T to `>: Nothing <: Any`.
+ // This is done to avoid cyclic reference errors for F-bounds.
+ // This is subtle: `sym` has now an empty TypeBounds, but is not automatically
+ // made an abstract type. If it had been made an abstract type, it would count as an
+ // abstract type of its enclosing class, which might make that class an invalid
+ // prefix. I verified this would lead to an error when compiling io.ClassPath.
+ // A distilled version is in pos/prefix.scala.
+ //
+ // The scheme critically relies on an implementation detail of isRef, which
+ // inspects a TypeRef's info, instead of simply dealiasing alias types.
+
+ val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree]
+ val rhs = tdef.rhs match {
+ case PolyTypeTree(_, body) => body
+ case rhs => rhs
+ }
+ val rhsBodyType = typedAheadType(rhs).tpe
+ val rhsType = if (isDerived) rhsBodyType else abstracted(rhsBodyType)
+ val unsafeInfo = rhsType match {
+ case bounds: TypeBounds => bounds
+ case alias => TypeAlias(alias, if (sym is Local) sym.variance else 0)
+ }
+ if (isDerived) sym.info = unsafeInfo
+ else {
+ sym.info = NoCompleter
+ sym.info = checkNonCyclic(sym, unsafeInfo, reportErrors = true)
+ }
+
+ // Here we pay the price for the cavalier setting info to TypeBounds.empty above.
+ // We need to compensate by invalidating caches in references that might
+ // still contain the TypeBounds.empty. If we do not do this, stdlib factories
+ // fail with a bounds error in PostTyper.
+ def ensureUpToDate(tp: Type, outdated: Type) = tp match {
+ case tref: TypeRef if tref.info == outdated && sym.info != outdated =>
+ tref.uncheckedSetSym(null)
+ case _ =>
+ }
+ ensureUpToDate(sym.typeRef, dummyInfo)
+ ensureUpToDate(sym.typeRef.appliedTo(tparamSyms.map(_.typeRef)), TypeBounds.empty)
+ sym.info
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
new file mode 100644
index 000000000..9a20a452e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -0,0 +1,488 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast._
+import Contexts._, Types._, Flags._, Denotations._, Names._, StdNames._, NameOps._, Symbols._
+import Trees._
+import Constants._
+import Scopes._
+import annotation.unchecked
+import util.Positions._
+import util.{Stats, SimpleMap}
+import util.common._
+import Decorators._
+import Uniques._
+import ErrorReporting.errorType
+import config.Printers.typr
+import collection.mutable
+
+object ProtoTypes {
+
+ import tpd._
+
+ /** A trait defining an `isCompatible` method. */
+ trait Compatibility {
+
+ /** Is there an implicit conversion from `tp` to `pt`? */
+ def viewExists(tp: Type, pt: Type)(implicit ctx: Context): Boolean
+
+ /** A type `tp` is compatible with a type `pt` if one of the following holds:
+ * 1. `tp` is a subtype of `pt`
+ * 2. `pt` is a by-name parameter type, and `tp` is compatible with its underlying type
+ * 3. there is an implicit conversion from `tp` to `pt`.
+ * 4. `tp` is a numeric subtype of `pt` (this case applies even if implicit conversions are disabled)
+ */
+ def isCompatible(tp: Type, pt: Type)(implicit ctx: Context): Boolean =
+ (tp.widenExpr relaxed_<:< pt.widenExpr) || viewExists(tp, pt)
+
+ /** Test compatibility after normalization in a fresh typerstate. */
+ def normalizedCompatible(tp: Type, pt: Type)(implicit ctx: Context) = {
+ val nestedCtx = ctx.fresh.setExploreTyperState
+ isCompatible(normalize(tp, pt)(nestedCtx), pt)(nestedCtx)
+ }
+
+ private def disregardProto(pt: Type)(implicit ctx: Context): Boolean = pt.dealias match {
+ case _: OrType => true
+ case pt => pt.isRef(defn.UnitClass)
+ }
+
+ /** Check that the result type of the current method
+ * fits the given expected result type.
+ */
+ def constrainResult(mt: Type, pt: Type)(implicit ctx: Context): Boolean = pt match {
+ case pt: FunProto =>
+ mt match {
+ case mt: MethodType =>
+ mt.isDependent || constrainResult(mt.resultType, pt.resultType)
+ case _ =>
+ true
+ }
+ case _: ValueTypeOrProto if !disregardProto(pt) =>
+ mt match {
+ case mt: MethodType =>
+ mt.isDependent || isCompatible(normalize(mt, pt), pt)
+ case _ =>
+ isCompatible(mt, pt)
+ }
+ case _: WildcardType =>
+ isCompatible(mt, pt)
+ case _ =>
+ true
+ }
+ }
+
+ object NoViewsAllowed extends Compatibility {
+ override def viewExists(tp: Type, pt: Type)(implicit ctx: Context): Boolean = false
+ }
+
+ /** A trait for prototypes that match all types */
+ trait MatchAlways extends ProtoType {
+ def isMatchedBy(tp1: Type)(implicit ctx: Context) = true
+ def map(tm: TypeMap)(implicit ctx: Context): ProtoType = this
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context): T = x
+ }
+
+ /** A class marking ignored prototypes that can be revealed by `deepenProto` */
+ case class IgnoredProto(ignored: Type) extends UncachedGroundType with MatchAlways {
+ override def deepenProto(implicit ctx: Context): Type = ignored
+ }
+
+ /** A prototype for expressions [] that are part of a selection operation:
+ *
+ * [ ].name: proto
+ */
+ abstract case class SelectionProto(val name: Name, val memberProto: Type, val compat: Compatibility)
+ extends CachedProxyType with ProtoType with ValueTypeOrProto {
+
+ override def isMatchedBy(tp1: Type)(implicit ctx: Context) = {
+ name == nme.WILDCARD || {
+ val mbr = tp1.member(name)
+ def qualifies(m: SingleDenotation) =
+ memberProto.isRef(defn.UnitClass) ||
+ compat.normalizedCompatible(m.info, memberProto)
+ mbr match { // hasAltWith inlined for performance
+ case mbr: SingleDenotation => mbr.exists && qualifies(mbr)
+ case _ => mbr hasAltWith qualifies
+ }
+ }
+ }
+
+ def underlying(implicit ctx: Context) = WildcardType
+
+ def derivedSelectionProto(name: Name, memberProto: Type, compat: Compatibility)(implicit ctx: Context) =
+ if ((name eq this.name) && (memberProto eq this.memberProto) && (compat eq this.compat)) this
+ else SelectionProto(name, memberProto, compat)
+
+ override def equals(that: Any): Boolean = that match {
+ case that: SelectionProto =>
+ (name eq that.name) && (memberProto == that.memberProto) && (compat eq that.compat)
+ case _ =>
+ false
+ }
+
+ def map(tm: TypeMap)(implicit ctx: Context) = derivedSelectionProto(name, tm(memberProto), compat)
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context) = ta(x, memberProto)
+
+ override def deepenProto(implicit ctx: Context) = derivedSelectionProto(name, memberProto.deepenProto, compat)
+
+ override def computeHash = addDelta(doHash(name, memberProto), if (compat eq NoViewsAllowed) 1 else 0)
+ }
+
+ class CachedSelectionProto(name: Name, memberProto: Type, compat: Compatibility) extends SelectionProto(name, memberProto, compat)
+
+ object SelectionProto {
+ def apply(name: Name, memberProto: Type, compat: Compatibility)(implicit ctx: Context): SelectionProto = {
+ val selproto = new CachedSelectionProto(name, memberProto, compat)
+ if (compat eq NoViewsAllowed) unique(selproto) else selproto
+ }
+ }
+
+ /** Create a selection proto-type, but only one level deep;
+ * treat constructors specially
+ */
+ def selectionProto(name: Name, tp: Type, typer: Typer)(implicit ctx: Context) =
+ if (name.isConstructorName) WildcardType
+ else tp match {
+ case tp: UnapplyFunProto => new UnapplySelectionProto(name)
+ case tp => SelectionProto(name, IgnoredProto(tp), typer)
+ }
+
+ /** A prototype for expressions [] that are in some unspecified selection operation
+ *
+ * [].?: ?
+ *
+ * Used to indicate that the expression is in a context where the only valid
+ * operation is further selection. In this case, the expression need not be a value.
+ * @see checkValue
+ */
+ @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed)
+
+ /** A prototype for selections in pattern constructors */
+ class UnapplySelectionProto(name: Name) extends SelectionProto(name, WildcardType, NoViewsAllowed)
+
+ trait ApplyingProto extends ProtoType
+
+ /** A prototype for expressions that appear in function position
+ *
+ * [](args): resultType
+ */
+ case class FunProto(args: List[untpd.Tree], resType: Type, typer: Typer)(implicit ctx: Context)
+ extends UncachedGroundType with ApplyingProto {
+ private var myTypedArgs: List[Tree] = Nil
+
+ override def resultType(implicit ctx: Context) = resType
+
+ /** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */
+ private var myTypedArg: SimpleMap[untpd.Tree, Tree] = SimpleMap.Empty
+
+ /** A map recording the typer states in which arguments stored in myTypedArg were typed */
+ private var evalState: SimpleMap[untpd.Tree, TyperState] = SimpleMap.Empty
+
+ def isMatchedBy(tp: Type)(implicit ctx: Context) =
+ typer.isApplicable(tp, Nil, typedArgs, resultType)
+
+ def derivedFunProto(args: List[untpd.Tree] = this.args, resultType: Type, typer: Typer = this.typer) =
+ if ((args eq this.args) && (resultType eq this.resultType) && (typer eq this.typer)) this
+ else new FunProto(args, resultType, typer)
+
+ override def notApplied = WildcardType
+
+ /** Forget the types of any arguments that have been typed producing a constraint in a
+ * typer state that is not yet committed into the one of the current context `ctx`.
+ * This is necessary to avoid "orphan" PolyParams that are referred to from
+ * type variables in the typed arguments, but that are not registered in the
+ * current constraint. A test case is pos/t1756.scala.
+ * @return True if all arguments have types (in particular, no types were forgotten).
+ */
+ def allArgTypesAreCurrent()(implicit ctx: Context): Boolean = {
+ evalState foreachBinding { (arg, tstate) =>
+ if (tstate.uncommittedAncestor.constraint ne ctx.typerState.constraint) {
+ typr.println(i"need to invalidate $arg / ${myTypedArg(arg)}, ${tstate.constraint}, current = ${ctx.typerState.constraint}")
+ myTypedArg = myTypedArg.remove(arg)
+ evalState = evalState.remove(arg)
+ }
+ }
+ myTypedArg.size == args.length
+ }
+
+ private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree)(implicit ctx: Context): Tree = {
+ var targ = myTypedArg(arg)
+ if (targ == null) {
+ targ = typerFn(arg)
+ if (!ctx.reporter.hasPending) {
+ myTypedArg = myTypedArg.updated(arg, targ)
+ evalState = evalState.updated(arg, ctx.typerState)
+ }
+ }
+ targ
+ }
+
+ /** The typed arguments. This takes any arguments already typed using
+ * `typedArg` into account.
+ */
+ def typedArgs: List[Tree] = {
+ if (myTypedArgs.size != args.length)
+ myTypedArgs = args.mapconserve(cacheTypedArg(_, typer.typed(_)))
+ myTypedArgs
+ }
+
+ /** Type single argument and remember the unadapted result in `myTypedArg`.
+ * Used to avoid repeated typings of trees when backtracking.
+ */
+ def typedArg(arg: untpd.Tree, formal: Type)(implicit ctx: Context): Tree = {
+ val targ = cacheTypedArg(arg, typer.typedUnadapted(_, formal))
+ typer.adapt(targ, formal, arg)
+ }
+
+ private var myTupled: Type = NoType
+
+ /** The same proto-type but with all arguments combined in a single tuple */
+ def tupled: FunProto = myTupled match {
+ case pt: FunProto =>
+ pt
+ case _ =>
+ myTupled = new FunProto(untpd.Tuple(args) :: Nil, resultType, typer)
+ tupled
+ }
+
+ /** Somebody called the `tupled` method of this prototype */
+ def isTupled: Boolean = myTupled.isInstanceOf[FunProto]
+
+ override def toString = s"FunProto(${args mkString ","} => $resultType)"
+
+ def map(tm: TypeMap)(implicit ctx: Context): FunProto =
+ derivedFunProto(args, tm(resultType), typer)
+
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context): T =
+ ta(ta.foldOver(x, typedArgs.tpes), resultType)
+
+ override def deepenProto(implicit ctx: Context) = derivedFunProto(args, resultType.deepenProto, typer)
+ }
+
+
+ /** A prototype for expressions that appear in function position
+ *
+ * [](args): resultType, where args are known to be typed
+ */
+ class FunProtoTyped(args: List[tpd.Tree], resultType: Type, typer: Typer)(implicit ctx: Context) extends FunProto(args, resultType, typer)(ctx) {
+ override def typedArgs = args
+ }
+
+ /** A prototype for implicitly inferred views:
+ *
+ * []: argType => resultType
+ */
+ abstract case class ViewProto(argType: Type, resType: Type)
+ extends CachedGroundType with ApplyingProto {
+
+ override def resultType(implicit ctx: Context) = resType
+
+ def isMatchedBy(tp: Type)(implicit ctx: Context): Boolean =
+ ctx.typer.isApplicable(tp, argType :: Nil, resultType)
+
+ def derivedViewProto(argType: Type, resultType: Type)(implicit ctx: Context) =
+ if ((argType eq this.argType) && (resultType eq this.resultType)) this
+ else ViewProto(argType, resultType)
+
+ def map(tm: TypeMap)(implicit ctx: Context): ViewProto = derivedViewProto(tm(argType), tm(resultType))
+
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context): T =
+ ta(ta(x, argType), resultType)
+
+ override def deepenProto(implicit ctx: Context) = derivedViewProto(argType, resultType.deepenProto)
+ }
+
+ class CachedViewProto(argType: Type, resultType: Type) extends ViewProto(argType, resultType) {
+ override def computeHash = doHash(argType, resultType)
+ }
+
+ object ViewProto {
+ def apply(argType: Type, resultType: Type)(implicit ctx: Context) =
+ unique(new CachedViewProto(argType, resultType))
+ }
+
+ class UnapplyFunProto(argType: Type, typer: Typer)(implicit ctx: Context) extends FunProto(
+ untpd.TypedSplice(dummyTreeOfType(argType))(ctx) :: Nil, WildcardType, typer)
+
+ /** A prototype for expressions [] that are type-parameterized:
+ *
+ * [] [targs] resultType
+ */
+ case class PolyProto(targs: List[Type], resType: Type) extends UncachedGroundType with ProtoType {
+
+ override def resultType(implicit ctx: Context) = resType
+
+ override def isMatchedBy(tp: Type)(implicit ctx: Context) = {
+ def isInstantiatable(tp: Type) = tp.widen match {
+ case tp: PolyType => tp.paramNames.length == targs.length
+ case _ => false
+ }
+ isInstantiatable(tp) || tp.member(nme.apply).hasAltWith(d => isInstantiatable(d.info))
+ }
+
+ def derivedPolyProto(targs: List[Type], resultType: Type) =
+ if ((targs eq this.targs) && (resType eq this.resType)) this
+ else PolyProto(targs, resType)
+
+ override def notApplied = WildcardType
+
+ def map(tm: TypeMap)(implicit ctx: Context): PolyProto =
+ derivedPolyProto(targs mapConserve tm, tm(resultType))
+
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context): T =
+ ta(ta.foldOver(x, targs), resultType)
+
+ override def deepenProto(implicit ctx: Context) = derivedPolyProto(targs, resultType.deepenProto)
+ }
+
+ /** A prototype for expressions [] that are known to be functions:
+ *
+ * [] _
+ */
+ @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways
+
+ /** A prototype for type constructors that are followed by a type application */
+ @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways
+
+ /** Add all parameters in given polytype `pt` to the constraint's domain.
+ * If the constraint contains already some of these parameters in its domain,
+ * make a copy of the polytype and add the copy's type parameters instead.
+ * Return either the original polytype, or the copy, if one was made.
+ * Also, if `owningTree` is non-empty, add a type variable for each parameter.
+ * @return The added polytype, and the list of created type variables.
+ */
+ def constrained(pt: PolyType, owningTree: untpd.Tree)(implicit ctx: Context): (PolyType, List[TypeVar]) = {
+ val state = ctx.typerState
+ assert(!(ctx.typerState.isCommittable && owningTree.isEmpty),
+ s"inconsistent: no typevars were added to committable constraint ${state.constraint}")
+
+ def newTypeVars(pt: PolyType): List[TypeVar] =
+ for (n <- (0 until pt.paramNames.length).toList)
+ yield new TypeVar(PolyParam(pt, n), state, owningTree, ctx.owner)
+
+ val added =
+ if (state.constraint contains pt) pt.newLikeThis(pt.paramNames, pt.paramBounds, pt.resultType)
+ else pt
+ val tvars = if (owningTree.isEmpty) Nil else newTypeVars(added)
+ ctx.typeComparer.addToConstraint(added, tvars)
+ (added, tvars)
+ }
+
+ /** Same as `constrained(pt, EmptyTree)`, but returns just the created polytype */
+ def constrained(pt: PolyType)(implicit ctx: Context): PolyType = constrained(pt, EmptyTree)._1
+
+ /** The normalized form of a type
+ * - unwraps polymorphic types, tracking their parameters in the current constraint
+ * - skips implicit parameters; if result type depends on implicit parameter,
+ * replace with Wildcard.
+ * - converts non-dependent method types to the corresponding function types
+ * - dereferences parameterless method types
+ * - dereferences nullary method types provided the corresponding function type
+ * is not a subtype of the expected type.
+ * Note: We need to take account of the possibility of inserting a () argument list in normalization. Otherwise, a type with a
+ * def toString(): String
+ * member would not count as a valid solution for ?{toString: String}. This would then lead to an implicit
+ * insertion, with a nice explosion of inference search because of course every implicit result has some sort
+ * of toString method. The problem is solved by dereferencing nullary method types if the corresponding
+ * function type is not compatible with the prototype.
+ */
+ def normalize(tp: Type, pt: Type)(implicit ctx: Context): Type = Stats.track("normalize") {
+ tp.widenSingleton match {
+ case poly: PolyType => normalize(constrained(poly).resultType, pt)
+ case mt: MethodType =>
+ if (mt.isImplicit)
+ if (mt.isDependent)
+ mt.resultType.substParams(mt, mt.paramTypes.map(Function.const(WildcardType)))
+ else mt.resultType
+ else
+ if (mt.isDependent) tp
+ else {
+ val rt = normalize(mt.resultType, pt)
+ pt match {
+ case pt: IgnoredProto => mt
+ case pt: ApplyingProto => mt.derivedMethodType(mt.paramNames, mt.paramTypes, rt)
+ case _ =>
+ val ft = defn.FunctionOf(mt.paramTypes, rt)
+ if (mt.paramTypes.nonEmpty || ft <:< pt) ft else rt
+ }
+ }
+ case et: ExprType => et.resultType
+ case _ => tp
+ }
+ }
+
+ /** Approximate occurrences of parameter types and uninstantiated typevars
+ * by wildcard types.
+ */
+ final def wildApprox(tp: Type, theMap: WildApproxMap = null)(implicit ctx: Context): Type = tp match {
+ case tp: NamedType => // default case, inlined for speed
+ if (tp.symbol.isStatic) tp
+ else tp.derivedSelect(wildApprox(tp.prefix, theMap))
+ case tp: RefinedType => // default case, inlined for speed
+ tp.derivedRefinedType(wildApprox(tp.parent, theMap), tp.refinedName, wildApprox(tp.refinedInfo, theMap))
+ case tp: TypeAlias => // default case, inlined for speed
+ tp.derivedTypeAlias(wildApprox(tp.alias, theMap))
+ case tp @ PolyParam(poly, pnum) =>
+ def unconstrainedApprox = WildcardType(wildApprox(poly.paramBounds(pnum)).bounds)
+ if (ctx.mode.is(Mode.TypevarsMissContext))
+ unconstrainedApprox
+ else
+ ctx.typerState.constraint.entry(tp) match {
+ case bounds: TypeBounds => wildApprox(WildcardType(bounds))
+ case NoType => unconstrainedApprox
+ case inst => wildApprox(inst)
+ }
+ case MethodParam(mt, pnum) =>
+ WildcardType(TypeBounds.upper(wildApprox(mt.paramTypes(pnum))))
+ case tp: TypeVar =>
+ wildApprox(tp.underlying)
+ case tp @ HKApply(tycon, args) =>
+ wildApprox(tycon) match {
+ case _: WildcardType => WildcardType // this ensures we get a * type
+ case tycon1 => tp.derivedAppliedType(tycon1, args.mapConserve(wildApprox(_)))
+ }
+ case tp: AndType =>
+ val tp1a = wildApprox(tp.tp1)
+ val tp2a = wildApprox(tp.tp2)
+ def wildBounds(tp: Type) =
+ if (tp.isInstanceOf[WildcardType]) tp.bounds else TypeBounds.upper(tp)
+ if (tp1a.isInstanceOf[WildcardType] || tp2a.isInstanceOf[WildcardType])
+ WildcardType(wildBounds(tp1a) & wildBounds(tp2a))
+ else
+ tp.derivedAndType(tp1a, tp2a)
+ case tp: OrType =>
+ val tp1a = wildApprox(tp.tp1)
+ val tp2a = wildApprox(tp.tp2)
+ if (tp1a.isInstanceOf[WildcardType] || tp2a.isInstanceOf[WildcardType])
+ WildcardType(tp1a.bounds | tp2a.bounds)
+ else
+ tp.derivedOrType(tp1a, tp2a)
+ case tp: LazyRef =>
+ WildcardType
+ case tp: SelectionProto =>
+ tp.derivedSelectionProto(tp.name, wildApprox(tp.memberProto), NoViewsAllowed)
+ case tp: ViewProto =>
+ tp.derivedViewProto(wildApprox(tp.argType), wildApprox(tp.resultType))
+ case _: ThisType | _: BoundType | NoPrefix => // default case, inlined for speed
+ tp
+ case _ =>
+ (if (theMap != null) theMap else new WildApproxMap).mapOver(tp)
+ }
+
+ @sharable object AssignProto extends UncachedGroundType with MatchAlways
+
+ private[ProtoTypes] class WildApproxMap(implicit ctx: Context) extends TypeMap {
+ def apply(tp: Type) = wildApprox(tp, this)
+ }
+
+ /** Dummy tree to be used as an argument of a FunProto or ViewProto type */
+ object dummyTreeOfType {
+ def apply(tp: Type): Tree = untpd.Literal(Constant(null)) withTypeUnchecked tp
+ def unapply(tree: Tree): Option[Type] = tree match {
+ case Literal(Constant(null)) => Some(tree.typeOpt)
+ case _ => None
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
new file mode 100644
index 000000000..2413c0c22
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
@@ -0,0 +1,108 @@
+package dotty.tools.dotc
+package typer
+
+import core._
+import Contexts._
+import Types._
+import Symbols._
+import Decorators._
+import typer.ProtoTypes._
+import ast.{tpd, untpd}
+import ast.Trees._
+import scala.util.control.NonFatal
+import util.Positions.Position
+import config.Printers.typr
+
+/** A version of Typer that keeps all symbols defined and referenced in a
+ * previously typed tree.
+ *
+ * All definition nodes keep their symbols. All leaf nodes for idents, selects,
+ * and TypeTrees keep their types. Indexing is a no-op.
+ *
+ * Otherwise, everything is as in Typer.
+ */
+class ReTyper extends Typer {
+ import tpd._
+
+ /** Checks that the given tree has been typed */
+ protected def promote(tree: untpd.Tree)(implicit ctx: Context): tree.ThisTree[Type] = {
+ assert(tree.hasType, i"$tree ${tree.getClass} ${tree.uniqueId}")
+ tree.withType(tree.typeOpt)
+ }
+
+ override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree =
+ promote(tree)
+
+ override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
+ assert(tree.hasType, tree)
+ val qual1 = typed(tree.qualifier, AnySelectionProto)
+ untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt)
+ }
+
+ override def typedLiteral(tree: untpd.Literal)(implicit ctc: Context): Literal =
+ promote(tree)
+
+ override def typedThis(tree: untpd.This)(implicit ctx: Context): Tree =
+ promote(tree)
+
+ override def typedSuper(tree: untpd.Super, pt: Type)(implicit ctx: Context): Tree =
+ promote(tree)
+
+ override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): TypeTree =
+ promote(tree)
+
+ override def typedBind(tree: untpd.Bind, pt: Type)(implicit ctx: Context): Bind = {
+ assert(tree.hasType)
+ val body1 = typed(tree.body, pt)
+ untpd.cpy.Bind(tree)(tree.name, body1).withType(tree.typeOpt)
+ }
+
+ override def typedUnApply(tree: untpd.UnApply, selType: Type)(implicit ctx: Context): UnApply = {
+ val fun1 = typedExpr(tree.fun, AnyFunctionProto)
+ val implicits1 = tree.implicits.map(typedExpr(_))
+ val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.tpe))
+ untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe)
+ }
+
+ override def localDummy(cls: ClassSymbol, impl: untpd.Template)(implicit ctx: Context) = impl.symbol
+
+ override def retrieveSym(tree: untpd.Tree)(implicit ctx: Context): Symbol = tree.symbol
+ override def symbolOfTree(tree: untpd.Tree)(implicit ctx: Context): Symbol = tree.symbol
+
+ override def localTyper(sym: Symbol) = this
+
+ override def index(trees: List[untpd.Tree])(implicit ctx: Context) = ctx
+
+ override def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType)(fallBack: (Tree, TyperState) => Tree)(implicit ctx: Context): Tree =
+ fallBack(tree, ctx.typerState)
+
+ override def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(implicit ctx: Context): Unit = ()
+
+ override def ensureConstrCall(cls: ClassSymbol, parents: List[Tree])(implicit ctx: Context): List[Tree] =
+ parents
+
+ override def encodeName(tree: untpd.NameTree)(implicit ctx: Context) = tree
+
+ override def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(implicit ctx: Context): Tree = fun.tpe match {
+ case mt @ MethodType(_, formals) =>
+ val args: List[Tree] = tree.args.zipWithConserve(formals)(typedExpr(_, _)).asInstanceOf[List[Tree]]
+ assignType(untpd.cpy.Apply(tree)(fun, args), fun, args)
+ case _ =>
+ super.handleUnexpectedFunType(tree, fun)
+ }
+
+ override def typedUnadapted(tree: untpd.Tree, pt: Type)(implicit ctx: Context) =
+ try super.typedUnadapted(tree, pt)
+ catch {
+ case NonFatal(ex) =>
+ if (ctx.isAfterTyper)
+ println(i"exception while typing $tree of class ${tree.getClass} # ${tree.uniqueId}")
+ throw ex
+ }
+
+ override def checkVariance(tree: Tree)(implicit ctx: Context) = ()
+ override def inferView(from: Tree, to: Type)(implicit ctx: Context): Implicits.SearchResult =
+ Implicits.NoImplicitMatches
+ override def checkCanEqual(ltp: Type, rtp: Type, pos: Position)(implicit ctx: Context): Unit = ()
+ override def inlineExpansion(mdef: DefDef)(implicit ctx: Context): List[Tree] = mdef :: Nil
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
new file mode 100644
index 000000000..46bdbf3b3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -0,0 +1,1526 @@
+package dotty.tools.dotc
+package typer
+
+import transform._
+import core._
+import config._
+import Symbols._, SymDenotations._, Types._, Contexts._, Decorators._, Flags._, Names._, NameOps._
+import StdNames._, Denotations._, Scopes._, Constants.Constant, SymUtils._
+import Annotations._
+import util.Positions._
+import scala.collection.{ mutable, immutable }
+import ast._
+import Trees._
+import TreeTransforms._
+import util.DotClass
+import scala.util.{Try, Success, Failure}
+import config.{ScalaVersion, NoScalaVersion}
+import Decorators._
+import typer.ErrorReporting._
+import DenotTransformers._
+import ValueClasses.isDerivedValueClass
+
+object RefChecks {
+ import tpd._
+
+ private def isDefaultGetter(name: Name): Boolean =
+ name.isTermName && name.asTermName.defaultGetterIndex >= 0
+
+ private val defaultMethodFilter = new NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = isDefaultGetter(name)
+ }
+
+ /** Only one overloaded alternative is allowed to define default arguments */
+ private def checkOverloadedRestrictions(clazz: Symbol)(implicit ctx: Context): Unit = {
+ // Using the default getters (such as methodName$default$1) as a cheap way of
+ // finding methods with default parameters. This way, we can limit the members to
+ // those with the DEFAULTPARAM flag, and infer the methods. Looking for the methods
+ // directly requires inspecting the parameter list of every one. That modification
+ // shaved 95% off the time spent in this method.
+
+ for (
+ defaultGetterClass <- List(clazz, clazz.companionModule.moduleClass);
+ if defaultGetterClass.isClass
+ ) {
+ val defaultGetterNames = defaultGetterClass.asClass.memberNames(defaultMethodFilter)
+ val defaultMethodNames = defaultGetterNames map (_.asTermName.defaultGetterToMethod)
+
+ for (name <- defaultMethodNames) {
+ val methods = clazz.info.member(name).alternatives.map(_.symbol)
+ val haveDefaults = methods.filter(_.hasDefaultParams)
+ if (haveDefaults.length > 1) {
+ val owners = haveDefaults map (_.owner)
+ // constructors of different classes are allowed to have defaults
+ if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size)
+ ctx.error(
+ "in " + clazz +
+ ", multiple overloaded alternatives of " + haveDefaults.head +
+ " define default arguments" + (
+ if (owners.forall(_ == clazz)) "."
+ else ".\nThe members with defaults are defined in " + owners.map(_.showLocated).mkString("", " and ", ".")),
+ clazz.pos)
+ }
+ }
+ }
+
+ // Check for doomed attempt to overload applyDynamic
+ if (clazz derivesFrom defn.DynamicClass) {
+ for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.symbol.typeParams.length)) {
+ ctx.error("implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)",
+ m1.symbol.pos)
+ }
+ }
+ }
+
+ /** Check that self type of this class conforms to self types of parents
+ * and required classes.
+ */
+ private def checkParents(cls: Symbol)(implicit ctx: Context): Unit = cls.info match {
+ case cinfo: ClassInfo =>
+ def checkSelfConforms(other: TypeRef, category: String, relation: String) = {
+ val otherSelf = other.givenSelfType.asSeenFrom(cls.thisType, other.classSymbol)
+ if (otherSelf.exists && !(cinfo.selfType <:< otherSelf))
+ ctx.error(ex"$category: self type ${cinfo.selfType} of $cls does not conform to self type $otherSelf of $relation ${other.classSymbol}", cls.pos)
+ }
+ for (parent <- cinfo.classParents)
+ checkSelfConforms(parent, "illegal inheritance", "parent")
+ for (reqd <- cinfo.givenSelfType.classSymbols)
+ checkSelfConforms(reqd.typeRef, "missing requirement", "required")
+ case _ =>
+ }
+
+ /** Check that a class and its companion object do not both define
+ * a class or module with same name
+ */
+ private def checkCompanionNameClashes(cls: Symbol)(implicit ctx: Context): Unit =
+ if (!(cls.owner is ModuleClass)) {
+ val other = cls.owner.linkedClass.info.decl(cls.name)
+ if (other.symbol.isClass)
+ ctx.error(s"name clash: ${cls.owner} defines $cls" + "\n" +
+ s"and its companion ${cls.owner.companionModule} also defines $other",
+ cls.pos)
+ }
+
+ // Override checking ------------------------------------------------------------
+
+ /** 1. Check all members of class `clazz` for overriding conditions.
+ * That is for overriding member M and overridden member O:
+ *
+ * 1.1. M must have the same or stronger access privileges as O.
+ * 1.2. O must not be final.
+ * 1.3. O is deferred, or M has `override` modifier.
+ * 1.4. If O is stable, then so is M.
+ * // @M: LIFTED 1.5. Neither M nor O are a parameterized type alias
+ * 1.6. If O is a type alias, then M is an alias of O.
+ * 1.7. If O is an abstract type then
+ * 1.7.1 either M is an abstract type, and M's bounds are sharper than O's bounds.
+ * or M is a type alias or class which conforms to O's bounds.
+ * 1.7.2 higher-order type arguments must respect bounds on higher-order type parameters -- @M
+ * (explicit bounds and those implied by variance annotations) -- @see checkKindBounds
+ * 1.8. If O and M are values, then
+ * 1.8.1 M's type is a subtype of O's type, or
+ * 1.8.2 M is of type []S, O is of type ()T and S <: T, or
+ * 1.8.3 M is of type ()S, O is of type []T and S <: T, or
+ * 1.9. If M is a macro def, O cannot be deferred unless there's a concrete method overriding O.
+ * 1.10. If M is not a macro def, O cannot be a macro def.
+ * 2. Check that only abstract classes have deferred members
+ * 3. Check that concrete classes do not have deferred definitions
+ * that are not implemented in a subclass.
+ * 4. Check that every member with an `override` modifier
+ * overrides some other member.
+ * TODO check that classes are not overridden
+ * TODO This still needs to be cleaned up; the current version is a straight port of what was there
+ * before, but it looks too complicated and method bodies are far too large.
+ */
+ private def checkAllOverrides(clazz: Symbol)(implicit ctx: Context): Unit = {
+ val self = clazz.thisType
+ var hasErrors = false
+
+ case class MixinOverrideError(member: Symbol, msg: String)
+
+ val mixinOverrideErrors = new mutable.ListBuffer[MixinOverrideError]()
+
+ def printMixinOverrideErrors(): Unit = {
+ mixinOverrideErrors.toList match {
+ case List() =>
+ case List(MixinOverrideError(_, msg)) =>
+ ctx.error(msg, clazz.pos)
+ case MixinOverrideError(member, msg) :: others =>
+ val others1 = others.map(_.member).filter(_.name != member.name).distinct
+ def othersMsg = {
+ val others1 = others.map(_.member)
+ .filter(_.name != member.name)
+ .map(_.show).distinct
+ if (others1.isEmpty) ""
+ else i";\n other members with override errors are:: $others1%, %"
+ }
+ ctx.error(msg + othersMsg, clazz.pos)
+ }
+ }
+
+ def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz)
+ def infoStringWithLocation(sym: Symbol) = infoString0(sym, true)
+
+ def infoString0(sym: Symbol, showLocation: Boolean) = {
+ val sym1 = sym.underlyingSymbol
+ def info = self.memberInfo(sym1)
+ i"${if (showLocation) sym1.showLocated else sym1}${
+ if (sym1.isAliasType) i", which equals ${info.bounds.hi}"
+ else if (sym1.isAbstractType) i" with bounds$info"
+ else if (sym1.is(Module)) ""
+ else if (sym1.isTerm) i" of type $info"
+ else ""
+ }"
+ }
+
+ /* Check that all conditions for overriding `other` by `member`
+ * of class `clazz` are met.
+ */
+ def checkOverride(member: Symbol, other: Symbol): Unit = {
+ def memberTp = self.memberInfo(member)
+ def otherTp = self.memberInfo(other)
+
+ ctx.debuglog("Checking validity of %s overriding %s".format(member.showLocated, other.showLocated))
+
+ def noErrorType = !memberTp.isErroneous && !otherTp.isErroneous
+
+ def overrideErrorMsg(msg: String): String = {
+ val isConcreteOverAbstract =
+ (other.owner isSubClass member.owner) && other.is(Deferred) && !member.is(Deferred)
+ val addendum =
+ if (isConcreteOverAbstract)
+ ";\n (Note that %s is abstract,\n and is therefore overridden by concrete %s)".format(
+ infoStringWithLocation(other),
+ infoStringWithLocation(member))
+ else if (ctx.settings.debug.value)
+ err.typeMismatchMsg(memberTp, otherTp)
+ else ""
+
+ "overriding %s;\n %s %s%s".format(
+ infoStringWithLocation(other), infoString(member), msg, addendum)
+ }
+
+ def emitOverrideError(fullmsg: String) =
+ if (!(hasErrors && member.is(Synthetic) && member.is(Module))) {
+ // suppress errors relating to synthetic companion objects if other override
+ // errors (e.g. relating to the companion class) have already been reported.
+ if (member.owner == clazz) ctx.error(fullmsg, member.pos)
+ else mixinOverrideErrors += new MixinOverrideError(member, fullmsg)
+ hasErrors = true
+ }
+
+ def overrideError(msg: String) = {
+ if (noErrorType)
+ emitOverrideError(overrideErrorMsg(msg))
+ }
+
+ def autoOverride(sym: Symbol) =
+ sym.is(Synthetic) && (
+ desugar.isDesugaredCaseClassMethodName(member.name) || // such names are added automatically, can't have an override present.
+ sym.is(Module)) // synthetic companion
+
+ def overrideAccessError() = {
+ ctx.log(i"member: ${member.showLocated} ${member.flags}") // DEBUG
+ ctx.log(i"other: ${other.showLocated} ${other.flags}") // DEBUG
+ val otherAccess = (other.flags & AccessFlags).toString
+ overrideError("has weaker access privileges; it should be " +
+ (if (otherAccess == "") "public" else "at least " + otherAccess))
+ }
+
+ def compatibleTypes =
+ if (member.isType) { // intersection of bounds to refined types must be nonempty
+ member.is(BaseTypeArg) ||
+ (memberTp frozen_<:< otherTp) || {
+ val jointBounds = (memberTp.bounds & otherTp.bounds).bounds
+ jointBounds.lo frozen_<:< jointBounds.hi
+ }
+ }
+ else
+ isDefaultGetter(member.name) || // default getters are not checked for compatibility
+ memberTp.overrides(otherTp)
+
+ def domain(sym: Symbol): Set[Name] = sym.info.namedTypeParams.map(_.name)
+
+ //Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG
+
+ // return if we already checked this combination elsewhere
+ if (member.owner != clazz) {
+ def deferredCheck = member.is(Deferred) || !other.is(Deferred)
+ def subOther(s: Symbol) = s derivesFrom other.owner
+ def subMember(s: Symbol) = s derivesFrom member.owner
+
+ if (subOther(member.owner) && deferredCheck) {
+ //Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG
+ return
+ }
+ val parentSymbols = clazz.info.parents.map(_.typeSymbol)
+ if (parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck)) {
+ //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG
+ return
+ }
+ if (parentSymbols forall (p => subOther(p) == subMember(p))) {
+ //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG
+ return
+ }
+ }
+
+ /* Is the intersection between given two lists of overridden symbols empty? */
+ def intersectionIsEmpty(syms1: Iterator[Symbol], syms2: Iterator[Symbol]) = {
+ val set2 = syms2.toSet
+ !(syms1 exists (set2 contains _))
+ }
+
+ // o: public | protected | package-protected (aka java's default access)
+ // ^-may be overridden by member with access privileges-v
+ // m: public | public/protected | public/protected/package-protected-in-same-package-as-o
+
+ if (member.is(Private)) // (1.1)
+ overrideError("has weaker access privileges; it should not be private")
+
+ // todo: align accessibility implication checking with isAccessible in Contexts
+ val ob = other.accessBoundary(member.owner)
+ val mb = member.accessBoundary(member.owner)
+ def isOverrideAccessOK = (
+ (member.flags & AccessFlags).isEmpty // member is public
+ || // - or -
+ (!other.is(Protected) || member.is(Protected)) && // if o is protected, so is m, and
+ (ob.isContainedIn(mb) || other.is(JavaProtected)) // m relaxes o's access boundary,
+ // or o is Java defined and protected (see #3946)
+ )
+ if (!isOverrideAccessOK) {
+ overrideAccessError()
+ } else if (other.isClass) {
+ overrideError("cannot be used here - class definitions cannot be overridden")
+ } else if (!other.is(Deferred) && member.isClass) {
+ overrideError("cannot be used here - classes can only override abstract types")
+ } else if (other.isEffectivelyFinal) { // (1.2)
+ overrideError(i"cannot override final member ${other.showLocated}")
+ } else if (!other.is(Deferred) &&
+ !isDefaultGetter(other.name) &&
+ !member.isAnyOverride) {
+ // (*) Exclusion for default getters, fixes SI-5178. We cannot assign the Override flag to
+ // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
+ if (autoOverride(member))
+ member.setFlag(Override)
+ else if (member.owner != clazz && other.owner != clazz && !(other.owner derivesFrom member.owner))
+ emitOverrideError(
+ clazz + " inherits conflicting members:\n "
+ + infoStringWithLocation(other) + " and\n " + infoStringWithLocation(member)
+ + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)")
+ else
+ overrideError("needs `override' modifier")
+ } else if (other.is(AbsOverride) && other.isIncompleteIn(clazz) && !member.is(AbsOverride)) {
+ overrideError("needs `abstract override' modifiers")
+ } else if (member.is(Override) && other.is(Accessor) &&
+ other.accessedFieldOrGetter.is(Mutable, butNot = Lazy)) {
+ // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
+ // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
+ if (!ctx.settings.overrideVars.value)
+ overrideError("cannot override a mutable variable")
+ } else if (member.isAnyOverride &&
+ !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) &&
+ !member.is(Deferred) && !other.is(Deferred) &&
+ intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) {
+ overrideError("cannot override a concrete member without a third member that's overridden by both " +
+ "(this rule is designed to prevent ``accidental overrides'')")
+ } else if (other.isStable && !member.isStable) { // (1.4)
+ overrideError("needs to be a stable, immutable value")
+ } else if (member.is(ModuleVal) && !other.isRealMethod && !other.is(Deferred | Lazy)) {
+ overrideError("may not override a concrete non-lazy value")
+ } else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy)) {
+ overrideError("may not override a non-lazy value")
+ } else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) {
+ overrideError("must be declared lazy to override a lazy value")
+ } else if (other.is(Deferred) && member.is(Macro) && member.extendedOverriddenSymbols.forall(_.is(Deferred))) { // (1.9)
+ overrideError("cannot be used here - term macros cannot override abstract methods")
+ } else if (other.is(Macro) && !member.is(Macro)) { // (1.10)
+ overrideError("cannot be used here - only term macros can override term macros")
+ } else if (!compatibleTypes) {
+ overrideError("has incompatible type" + err.whyNoMatchStr(memberTp, otherTp))
+ } else if (member.isType && domain(member) != domain(other)) {
+ overrideError("has different named type parameters: "+
+ i"[${domain(member).toList}%, %] instead of [${domain(other).toList}%, %]")
+ } else {
+ checkOverrideDeprecated()
+ }
+ }
+
+ /* TODO enable; right now the annotation is scala-private, so cannot be seen
+ * here.
+ */
+ def checkOverrideDeprecated() = { /*
+ if (other.hasDeprecatedOverridingAnnotation) {
+ val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
+ val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(member.pos, msg)
+ }*/
+ }
+
+ try {
+ val opc = new OverridingPairs.Cursor(clazz)
+ while (opc.hasNext) {
+ checkOverride(opc.overriding, opc.overridden)
+ opc.next()
+ }
+ } catch {
+ case ex: MergeError =>
+ val addendum = ex.tp1 match {
+ case tp1: ClassInfo =>
+ "\n(Note that having same-named member classes in types of a mixin composition is no longer allowed)"
+ case _ => ""
+ }
+ ctx.error(ex.getMessage + addendum, clazz.pos)
+ }
+ printMixinOverrideErrors()
+
+ // Verifying a concrete class has nothing unimplemented.
+ if (!clazz.is(AbstractOrTrait)) {
+ val abstractErrors = new mutable.ListBuffer[String]
+ def abstractErrorMessage =
+ // a little formatting polish
+ if (abstractErrors.size <= 2) abstractErrors mkString " "
+ else abstractErrors.tail.mkString(abstractErrors.head + ":\n", "\n", "")
+
+ def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = {
+ def prelude = (
+ if (clazz.isAnonymousClass || clazz.is(Module)) "object creation impossible"
+ else if (mustBeMixin) clazz + " needs to be a mixin"
+ else clazz + " needs to be abstract") + ", since"
+
+ if (abstractErrors.isEmpty) abstractErrors ++= List(prelude, msg)
+ else abstractErrors += msg
+ }
+
+ def hasJavaErasedOverriding(sym: Symbol): Boolean =
+ !ctx.erasurePhase.exists || // can't do the test, assume the best
+ ctx.atPhase(ctx.erasurePhase.next) { implicit ctx =>
+ clazz.info.nonPrivateMember(sym.name).hasAltWith { alt =>
+ alt.symbol.is(JavaDefined, butNot = Deferred) &&
+ !sym.owner.derivesFrom(alt.symbol.owner) &&
+ alt.matches(sym)
+ }
+ }
+
+ def ignoreDeferred(member: SingleDenotation) =
+ member.isType ||
+ member.symbol.is(SuperAccessor) || // not yet synthesized
+ member.symbol.is(JavaDefined) && hasJavaErasedOverriding(member.symbol)
+
+ // 2. Check that only abstract classes have deferred members
+ def checkNoAbstractMembers(): Unit = {
+ // Avoid spurious duplicates: first gather any missing members.
+ val missing = clazz.thisType.abstractTermMembers.filterNot(ignoreDeferred)
+ // Group missing members by the name of the underlying symbol,
+ // to consolidate getters and setters.
+ val grouped: Map[Name, Seq[SingleDenotation]] = missing groupBy (_.symbol.underlyingSymbol.name)
+ // Dotty deviation: Added type annotation for `grouped`.
+ // The inferred type is Map[Symbol#ThisName, Seq[SingleDenotation]]
+ // but then the definition of isMultiple fails with an error:
+ // RefChecks.scala:379: error: type mismatch:
+ // found : underlying.ThisName
+ // required: dotty.tools.dotc.core.Symbols.Symbol#ThisName
+ //
+ // val isMultiple = grouped.getOrElse(underlying.name(ctx), Nil).size > 1
+ // ^
+ // As far as I can see, the complaint is correct, even under the
+ // old reading where Symbol#ThisName means x.ThisName forSome { val x }
+
+ val missingMethods = grouped.toList flatMap {
+ case (name, syms) =>
+ val withoutSetters = syms filterNot (_.symbol.isSetter)
+ if (withoutSetters.nonEmpty) withoutSetters else syms
+ }
+
+ def stubImplementations: List[String] = {
+ // Grouping missing methods by the declaring class
+ val regrouped = missingMethods.groupBy(_.symbol.owner).toList
+ def membersStrings(members: List[SingleDenotation]) =
+ members.sortBy(_.symbol.name.toString).map(_.showDcl + " = ???")
+
+ if (regrouped.tail.isEmpty)
+ membersStrings(regrouped.head._2)
+ else (regrouped.sortBy("" + _._1.name) flatMap {
+ case (owner, members) =>
+ ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ ""
+ }).init
+ }
+
+ // If there are numerous missing methods, we presume they are aware of it and
+ // give them a nicely formatted set of method signatures for implementing.
+ if (missingMethods.size > 1) {
+ abstractClassError(false, "it has " + missingMethods.size + " unimplemented members.")
+ val preface =
+ """|/** As seen from %s, the missing signatures are as follows.
+ | * For convenience, these are usable as stub implementations.
+ | */
+ |""".stripMargin.format(clazz)
+ abstractErrors += stubImplementations.map(" " + _ + "\n").mkString(preface, "", "")
+ return
+ }
+
+ for (member <- missing) {
+ val memberSym = member.symbol
+ def undefined(msg: String) =
+ abstractClassError(false, s"${member.showDcl} is not defined $msg")
+ val underlying = memberSym.underlyingSymbol
+
+ // Give a specific error message for abstract vars based on why it fails:
+ // It could be unimplemented, have only one accessor, or be uninitialized.
+ if (underlying.is(Mutable)) {
+ val isMultiple = grouped.getOrElse(underlying.name(ctx), Nil).size > 1
+
+ // If both getter and setter are missing, squelch the setter error.
+ if (memberSym.isSetter && isMultiple) ()
+ else undefined(
+ if (memberSym.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)"
+ else if (memberSym.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)"
+ else err.abstractVarMessage(memberSym))
+ } else if (underlying.is(Method)) {
+ // If there is a concrete method whose name matches the unimplemented
+ // abstract method, and a cursory examination of the difference reveals
+ // something obvious to us, let's make it more obvious to them.
+ val abstractParams = underlying.info.firstParamTypes
+ val matchingName = clazz.info.nonPrivateMember(underlying.name).alternatives
+ val matchingArity = matchingName filter { m =>
+ !m.symbol.is(Deferred) &&
+ m.info.firstParamTypes.length == abstractParams.length
+ }
+
+ matchingArity match {
+ // So far so good: only one candidate method
+ case concrete :: Nil =>
+ val mismatches =
+ abstractParams.zip(concrete.info.firstParamTypes)
+ .filterNot { case (x, y) => x =:= y }
+ mismatches match {
+ // Only one mismatched parameter: say something useful.
+ case (pa, pc) :: Nil =>
+ val abstractSym = pa.typeSymbol
+ val concreteSym = pc.typeSymbol
+ def subclassMsg(c1: Symbol, c2: Symbol) =
+ s": ${c1.showLocated} is a subclass of ${c2.showLocated}, but method parameter types must match exactly."
+ val addendum =
+ if (abstractSym == concreteSym) {
+ val paArgs = pa.argInfos
+ val pcArgs = pc.argInfos
+ val paConstr = pa.withoutArgs(paArgs)
+ val pcConstr = pc.withoutArgs(pcArgs)
+ (paConstr, pcConstr) match {
+ case (TypeRef(pre1, _), TypeRef(pre2, _)) =>
+ if (pre1 =:= pre2) ": their type parameters differ"
+ else ": their prefixes (i.e. enclosing instances) differ"
+ case _ =>
+ ""
+ }
+ } else if (abstractSym isSubClass concreteSym)
+ subclassMsg(abstractSym, concreteSym)
+ else if (concreteSym isSubClass abstractSym)
+ subclassMsg(concreteSym, abstractSym)
+ else ""
+
+ undefined(s"\n(Note that ${pa.show} does not match ${pc.show}$addendum)")
+ case xs =>
+ undefined(s"\n(The class implements a member with a different type: ${concrete.showDcl})")
+ }
+ case Nil =>
+ undefined("")
+ case concretes =>
+ undefined(s"\n(The class implements members with different types: ${concretes.map(_.showDcl)}%\n %)")
+ }
+ } else undefined("")
+ }
+ }
+
+ // 3. Check that concrete classes do not have deferred definitions
+ // that are not implemented in a subclass.
+ // Note that this is not the same as (2); In a situation like
+ //
+ // class C { def m: Int = 0}
+ // class D extends C { def m: Int }
+ //
+ // (3) is violated but not (2).
+ def checkNoAbstractDecls(bc: Symbol): Unit = {
+ for (decl <- bc.info.decls) {
+ if (decl.is(Deferred) && !ignoreDeferred(decl)) {
+ val impl = decl.matchingMember(clazz.thisType)
+ if (impl == NoSymbol || (decl.owner isSubClass impl.owner)) {
+ val impl1 = clazz.thisType.nonPrivateMember(decl.name) // DEBUG
+ ctx.log(i"${impl1}: ${impl1.info}") // DEBUG
+ ctx.log(i"${clazz.thisType.memberInfo(decl)}") // DEBUG
+ abstractClassError(false, "there is a deferred declaration of " + infoString(decl) +
+ " which is not implemented in a subclass" + err.abstractVarMessage(decl))
+ }
+ }
+ }
+ if (bc.asClass.superClass.is(Abstract))
+ checkNoAbstractDecls(bc.asClass.superClass)
+ }
+
+ checkNoAbstractMembers()
+ if (abstractErrors.isEmpty)
+ checkNoAbstractDecls(clazz)
+
+ if (abstractErrors.nonEmpty)
+ ctx.error(abstractErrorMessage, clazz.pos)
+ } else if (clazz.is(Trait) && !(clazz derivesFrom defn.AnyValClass)) {
+ // For non-AnyVal classes, prevent abstract methods in interfaces that override
+ // final members in Object; see #4431
+ for (decl <- clazz.info.decls) {
+ // Have to use matchingSymbol, not a method involving overridden symbols,
+ // because the scala type system understands that an abstract method here does not
+ // override a concrete method in Object. The jvm, however, does not.
+ val overridden = decl.matchingDecl(defn.ObjectClass, defn.ObjectType)
+ if (overridden.is(Final))
+ ctx.error("trait cannot redefine final method from class AnyRef", decl.pos)
+ }
+ }
+
+ /* Returns whether there is a symbol declared in class `inclazz`
+ * (which must be different from `clazz`) whose name and type
+ * seen as a member of `class.thisType` matches `member`'s.
+ */
+ def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = {
+
+ def isSignatureMatch(sym: Symbol) = !sym.isTerm ||
+ clazz.thisType.memberInfo(sym).matchesLoosely(member.info)
+
+ /* The rules for accessing members which have an access boundary are more
+ * restrictive in java than scala. Since java has no concept of package nesting,
+ * a member with "default" (package-level) access can only be accessed by members
+ * in the exact same package. Example:
+ *
+ * package a.b;
+ * public class JavaClass { void foo() { } }
+ *
+ * The member foo() can be accessed only from members of package a.b, and not
+ * nested packages like a.b.c. In the analogous scala class:
+ *
+ * package a.b
+ * class ScalaClass { private[b] def foo() = () }
+ *
+ * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic
+ * is restricting the set of matching signatures according to the above semantics.
+ */
+ def javaAccessCheck(sym: Symbol) = (
+ !inclazz.is(JavaDefined) // not a java defined member
+ || !sym.privateWithin.exists // no access boundary
+ || sym.is(Protected) // marked protected in java, thus accessible to subclasses
+ || sym.privateWithin == member.enclosingPackageClass // exact package match
+ )
+ def classDecls = inclazz.info.nonPrivateDecl(member.name)
+
+ (inclazz != clazz) &&
+ classDecls.hasAltWith(d => isSignatureMatch(d.symbol) && javaAccessCheck(d.symbol))
+ }
+
+ // 4. Check that every defined member with an `override` modifier overrides some other member.
+ for (member <- clazz.info.decls)
+ if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
+ // for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
+
+ val nonMatching = clazz.info.member(member.name).altsWith(alt => alt.owner != clazz && !alt.is(Final))
+ def issueError(suffix: String) =
+ ctx.error(i"$member overrides nothing$suffix", member.pos)
+ nonMatching match {
+ case Nil =>
+ issueError("")
+ case ms =>
+ val superSigs = ms.map(_.showDcl).mkString("\n")
+ issueError(s".\nNote: the super classes of ${member.owner} contain the following, non final members named ${member.name}:\n${superSigs}")
+ }
+ member.resetFlag(Override)
+ member.resetFlag(AbsOverride)
+ }
+ }
+
+ // Note: if a symbol has both @deprecated and @migration annotations and both
+ // warnings are enabled, only the first one checked here will be emitted.
+ // I assume that's a consequence of some code trying to avoid noise by suppressing
+ // warnings after the first, but I think it'd be better if we didn't have to
+ // arbitrarily choose one as more important than the other.
+ private def checkUndesiredProperties(sym: Symbol, pos: Position)(implicit ctx: Context): Unit = {
+ // If symbol is deprecated, and the point of reference is not enclosed
+ // in either a deprecated member or a scala bridge method, issue a warning.
+ if (sym.isDeprecated && !ctx.owner.ownersIterator.exists(_.isDeprecated)) {
+ ctx.deprecationWarning("%s%s is deprecated%s".format(
+ sym, sym.showLocated, sym.deprecationMessage map (": " + _) getOrElse "", pos))
+ }
+ // Similar to deprecation: check if the symbol is marked with @migration
+ // indicating it has changed semantics between versions.
+ if (sym.hasAnnotation(defn.MigrationAnnot) && ctx.settings.Xmigration.value != NoScalaVersion) {
+ val symVersion: scala.util.Try[ScalaVersion] = sym.migrationVersion.get
+ val changed = symVersion match {
+ case scala.util.Success(v) =>
+ ctx.settings.Xmigration.value < v
+ case Failure(ex) =>
+ ctx.warning(s"${sym.showLocated} has an unparsable version number: ${ex.getMessage()}", pos)
+ false
+ }
+ if (changed)
+ ctx.warning(s"${sym.showLocated} has changed semantics in version $symVersion:\n${sym.migrationMessage.get}")
+ }
+ /* (Not enabled yet)
+ * See an explanation of compileTimeOnly in its scaladoc at scala.annotation.compileTimeOnly.
+ *
+ if (sym.isCompileTimeOnly) {
+ def defaultMsg =
+ sm"""Reference to ${sym.fullLocationString} should not have survived past type checking,
+ |it should have been processed and eliminated during expansion of an enclosing macro."""
+ // The getOrElse part should never happen, it's just here as a backstop.
+ ctx.error(sym.compileTimeOnlyMessage getOrElse defaultMsg, pos)
+ }*/
+ }
+
+ /** Check that a deprecated val or def does not override a
+ * concrete, non-deprecated method. If it does, then
+ * deprecation is meaningless.
+ */
+ private def checkDeprecatedOvers(tree: Tree)(implicit ctx: Context): Unit = {
+ val symbol = tree.symbol
+ if (symbol.isDeprecated) {
+ val concrOvers =
+ symbol.allOverriddenSymbols.filter(sym =>
+ !sym.isDeprecated && !sym.is(Deferred))
+ if (!concrOvers.isEmpty)
+ ctx.deprecationWarning(
+ symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
+ concrOvers.map(_.name.decode).mkString(" ", ", ", ""), tree.pos)
+ }
+ }
+
+ /** Verify classes extending AnyVal meet the requirements */
+ private def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(implicit ctx: Context) = {
+ def checkValueClassMember(stat: Tree) = stat match {
+ case _: ValDef if !stat.symbol.is(ParamAccessor) =>
+ ctx.error(s"value class may not define non-parameter field", stat.pos)
+ case _: DefDef if stat.symbol.isConstructor =>
+ ctx.error(s"value class may not define secondary constructor", stat.pos)
+ case _: MemberDef | _: Import | EmptyTree =>
+ // ok
+ case _ =>
+ ctx.error(s"value class may not contain initialization statements", stat.pos)
+ }
+ if (isDerivedValueClass(clazz)) {
+ if (clazz.is(Trait))
+ ctx.error("Only classes (not traits) are allowed to extend AnyVal", clazz.pos)
+ if (clazz.is(Abstract))
+ ctx.error("`abstract' modifier cannot be used with value classes", clazz.pos)
+ if (!clazz.isStatic)
+ ctx.error(s"value class may not be a ${if (clazz.owner.isTerm) "local class" else "member of another class"}", clazz.pos)
+ else {
+ val clParamAccessors = clazz.asClass.paramAccessors.filter(sym => sym.isTerm && !sym.is(Method))
+ clParamAccessors match {
+ case List(param) =>
+ if (param.is(Mutable))
+ ctx.error("value class parameter must not be a var", param.pos)
+ case _ =>
+ ctx.error("value class needs to have exactly one val parameter", clazz.pos)
+ }
+ }
+ stats.foreach(checkValueClassMember)
+ }
+ }
+
+ type LevelAndIndex = immutable.Map[Symbol, (LevelInfo, Int)]
+
+ class OptLevelInfo extends DotClass {
+ def levelAndIndex: LevelAndIndex = Map()
+ def enterReference(sym: Symbol, pos: Position): Unit = ()
+ }
+
+ /** A class to help in forward reference checking.
+  *
+  *  On construction it extends `outerLevelAndIndex` with an entry for every
+  *  MemberDef in `stats`, mapping the member's symbol to (this, index of the
+  *  stat). While the statements are traversed, `enterReference` records, per
+  *  level, the furthest-forward definition referenced so far (`maxIndex`)
+  *  together with the position and symbol of that reference (`refPos`,
+  *  `refSym`); the ValDef/self-constructor transforms then compare a
+  *  definition's own index against `maxIndex` to report illegal forward
+  *  references.
+  */
+ class LevelInfo(outerLevelAndIndex: LevelAndIndex, stats: List[Tree])(implicit ctx: Context)
+ extends OptLevelInfo {
+ // Fold over `stats`, numbering them from 0; only MemberDefs get an entry.
+ override val levelAndIndex: LevelAndIndex =
+ ((outerLevelAndIndex, 0) /: stats) {(mi, stat) =>
+ val (m, idx) = mi
+ val m1 = stat match {
+ case stat: MemberDef => m.updated(stat.symbol, (this, idx))
+ case _ => m
+ }
+ (m1, idx + 1)
+ }._1
+ // Highest statement index referenced so far at this level (MinValue = none).
+ var maxIndex: Int = Int.MinValue
+ // Position and symbol of the reference that produced `maxIndex`,
+ // kept only for error reporting. Unset until enterReference fires.
+ var refPos: Position = _
+ var refSym: Symbol = _
+
+ // Record a reference to `sym` if it is a term-owned (i.e. local) definition
+ // tracked at some level and lies further forward than anything seen before.
+ override def enterReference(sym: Symbol, pos: Position): Unit =
+ if (sym.exists && sym.owner.isTerm)
+ levelAndIndex.get(sym) match {
+ case Some((level, idx)) if (level.maxIndex < idx) =>
+ level.maxIndex = idx
+ level.refPos = pos
+ level.refSym = sym
+ case _ =>
+ }
+ }
+
+ /** The shared do-nothing level info, used wherever no term-owned statement
+  *  sequence is being tracked (e.g. at the top of a `Transform`).
+  */
+ val NoLevelInfo = new OptLevelInfo()
+}
+import RefChecks._
+
+/** Post-attribution checking and transformation, which fulfills the following roles
+ *
+ * 1. This phase performs the following checks.
+ *
+ * - only one overloaded alternative defines default arguments
+ * - applyDynamic methods are not overloaded
+ * - all overrides conform to rules laid down by `checkAllOverrides`.
+ * - any value classes conform to rules laid down by `checkDerivedValueClass`.
+ * - this(...) constructor calls do not forward reference other definitions in their block (not even lazy vals).
+ * - no forward reference in a local block jumps over a non-lazy val definition.
+ * - a class and its companion object do not both define a class or module with the same name.
+ *
+ * 2. It warns about references to symbols labeled deprecated or migration.
+ *
+ * 3. It performs the following transformations:
+ *
+ * - if (true) A else B --> A
+ * if (false) A else B --> B
+ * - macro definitions are eliminated.
+ *
+ * 4. It makes members not private where necessary. The following members
+ * cannot be private in the Java model:
+ * - term members of traits
+ * - the primary constructor of a value class
+ * - the parameter accessor of a value class
+ * - members accessed from an inner or companion class.
+ * All these members are marked as NotJavaPrivate.
+ * Unlike in Scala 2.x not-private members keep their name. It is
+ * up to the backend to find a unique expanded name for them. The
+ * rationale to do name changes that late is that they are very fragile.
+ *
+ * todo: But RefChecks is not done yet. It's still a somewhat dirty port from the Scala 2 version.
+ * todo: move non-trivial logic into mini-phases of its own
+ */
+class RefChecks extends MiniPhase { thisTransformer =>
+
+ import tpd._
+
+ override def phaseName: String = "refchecks"
+
+ // Root transform carries no level info; prepareForStats swaps in a
+ // LevelInfo-bearing Transform on entering a term-owned statement sequence.
+ val treeTransform = new Transform(NoLevelInfo)
+
+ /** The transform performing the checks described in the class comment.
+  *  `currentLevel` is the forward-reference bookkeeping for the innermost
+  *  enclosing term-owned statement sequence (NoLevelInfo if there is none).
+  */
+ class Transform(currentLevel: RefChecks.OptLevelInfo = RefChecks.NoLevelInfo) extends TreeTransform {
+ def phase = thisTransformer
+
+ // For statements owned by a term, return a fresh Transform whose LevelInfo
+ // indexes `trees` for forward-reference checking; otherwise reuse this one.
+ override def prepareForStats(trees: List[Tree])(implicit ctx: Context) = {
+ // println(i"preparing for $trees%; %, owner = ${ctx.owner}")
+ if (ctx.owner.isTerm) new Transform(new LevelInfo(currentLevel.levelAndIndex, trees))
+ else this
+ }
+
+ // Statements themselves are left unchanged; all work happens per-tree.
+ override def transformStats(trees: List[Tree])(implicit ctx: Context, info: TransformerInfo): List[Tree] = trees
+
+ // Error if a forward reference recorded at this level jumps over this
+ // local, non-lazy val (lazy vals are exempt, per the class comment).
+ override def transformValDef(tree: ValDef)(implicit ctx: Context, info: TransformerInfo) = {
+ checkDeprecatedOvers(tree)
+ val sym = tree.symbol
+ if (sym.exists && sym.owner.isTerm && !sym.is(Lazy))
+ currentLevel.levelAndIndex.get(sym) match {
+ case Some((level, symIdx)) if symIdx < level.maxIndex =>
+ ctx.debuglog("refsym = " + level.refSym)
+ ctx.error(s"forward reference extends over definition of $sym", level.refPos)
+ case _ =>
+ }
+ tree
+ }
+
+ // Macro definitions are eliminated here (transformation 3 in the class
+ // comment); other defs pass through after the deprecated-override check.
+ override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
+ checkDeprecatedOvers(tree)
+ if (tree.symbol is Macro) EmptyTree else tree
+ }
+
+ // Run the class-level checks (overloads, parents, companion name clashes,
+ // overrides, value-class restrictions) on the template's owning class.
+ // A MergeError from override checking is reported at the template position
+ // instead of crashing the compiler.
+ override def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo) = try {
+ val cls = ctx.owner
+ checkOverloadedRestrictions(cls)
+ checkParents(cls)
+ checkCompanionNameClashes(cls)
+ checkAllOverrides(cls)
+ checkDerivedValueClass(cls, tree.body)
+ tree
+ } catch {
+ case ex: MergeError =>
+ ctx.error(ex.getMessage, tree.pos)
+ tree
+ }
+
+ // Warn on undesired properties of the referenced symbol (presumably the
+ // deprecation/migration warnings of item 2 in the class comment — the
+ // helper is defined outside this view) and record the reference for
+ // forward-reference checking.
+ override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) = {
+ checkUndesiredProperties(tree.symbol, tree.pos)
+ currentLevel.enterReference(tree.symbol, tree.pos)
+ tree
+ }
+
+ // Same symbol check as for idents; selections do not enter a reference
+ // (only the qualifier's parts can be forward references).
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) = {
+ checkUndesiredProperties(tree.symbol, tree.pos)
+ tree
+ }
+
+ // Reject any forward reference made from a this(...) constructor call's
+ // enclosing block (maxIndex > 0 means some later definition was referenced).
+ override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) = {
+ if (isSelfConstrCall(tree)) {
+ assert(currentLevel.isInstanceOf[LevelInfo], ctx.owner + "/" + i"$tree")
+ val level = currentLevel.asInstanceOf[LevelInfo]
+ if (level.maxIndex > 0) {
+ // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
+ ctx.debuglog("refsym = " + level.refSym)
+ ctx.error("forward reference not allowed from self constructor invocation", level.refPos)
+ }
+ }
+ tree
+ }
+
+ // Constant-fold `if` on a statically known condition:
+ // if (true) A else B --> A; if (false) A else B --> B.
+ override def transformIf(tree: If)(implicit ctx: Context, info: TransformerInfo) =
+ tree.cond.tpe match {
+ case ConstantType(value) => if (value.booleanValue) tree.thenp else tree.elsep
+ case _ => tree
+ }
+
+ // Instantiating a class counts as referencing it for forward-ref purposes.
+ override def transformNew(tree: New)(implicit ctx: Context, info: TransformerInfo) = {
+ currentLevel.enterReference(tree.tpe.typeSymbol, tree.pos)
+ tree
+ }
+
+ // Reject `x.isInstanceOf[T]` when the static type of `x` is a primitive
+ // value class but `T` is not: such a test can never make sense.
+ override def transformTypeApply(tree: tpd.TypeApply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ tree.fun match {
+ case fun@Select(qual, selector) =>
+ val sym = tree.symbol
+
+ if (sym == defn.Any_isInstanceOf) {
+ val argType = tree.args.head.tpe
+ val qualCls = qual.tpe.widen.classSymbol
+ val argCls = argType.classSymbol
+ if (qualCls.isPrimitiveValueClass && !argCls.isPrimitiveValueClass) ctx.error("isInstanceOf cannot test if value types are references", tree.pos)
+ }
+ case _ =>
+ }
+ tree
+ }
+ }
+}
+
+/* todo: rewrite and re-enable
+
+// Comparison checking -------------------------------------------------------
+
+ object normalizeAll extends TypeMap {
+ def apply(tp: Type) = mapOver(tp).normalize
+ }
+
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint) (fn, args) match {
+ case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply =>
+ unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
+ case _ =>
+ }
+
+ private def isObjectOrAnyComparisonMethod(sym: Symbol) = sym match {
+ case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
+ case _ => false
+ }
+ /** Check the sensibility of using the given `equals` to compare `qual` and `other`. */
+ private def checkSensibleEquals(pos: Position, qual: Tree, name: Name, sym: Symbol, other: Tree) = {
+ def isReferenceOp = sym == Object_eq || sym == Object_ne
+ def isNew(tree: Tree) = tree match {
+ case Function(_, _) | Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
+ case _ => false
+ }
+ def underlyingClass(tp: Type): Symbol = {
+ val sym = tp.widen.typeSymbol
+ if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi)
+ else sym
+ }
+ val actual = underlyingClass(other.tpe)
+ val receiver = underlyingClass(qual.tpe)
+ def onTrees[T](f: List[Tree] => T) = f(List(qual, other))
+ def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
+
+ // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
+ def typesString = normalizeAll(qual.tpe.widen)+" and " + normalizeAll(other.tpe.widen)
+
+ /* Symbols which limit the warnings we can issue since they may be value types */
+ val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
+
+ // Whether def equals(other: Any) has known behavior: it is the default
+ // inherited from java.lang.Object, or it is a synthetically generated
+ // case equals. TODO - more cases are warnable if the target is a synthetic
+ // equals.
+ def isUsingWarnableEquals = {
+ val m = receiver.info.member(nme.equals_)
+ ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
+ }
+ def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
+ def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
+ // Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
+ def isUsingDefaultScalaOp = sym == Object_== || sym == Object_!= || sym == Any_== || sym == Any_!=
+ def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
+ // Whether the operands+operator represent a warnable combo (assuming anyrefs)
+ // Looking for comparisons performed with ==/!= in combination with either an
+ // equals method inherited from Object or a case class synthetic equals (for
+ // which we know the logic.)
+ def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
+ def isEitherNullable = (NullTpe <:< receiver.info) || (NullTpe <:< actual.info)
+ def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
+ def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
+ def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
+ def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
+ def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
+ def isJavaNumber(s: Symbol) = s isSubClass JavaNumberClass
+ // includes java.lang.Number if appropriate [SI-5779]
+ def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
+ def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
+ // used to short-circuit unrelatedTypes check if both sides are special
+ def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
+ val nullCount = onSyms(_ filter (_ == NullClass) size)
+ def isNonsenseValueClassCompare = (
+ !haveSubclassRelationship
+ && isUsingDefaultScalaOp
+ && isEitherValueClass
+ && !isCaseEquals
+ )
+
+ // Have we already determined that the comparison is non-sensible? I mean, non-sensical?
+ var isNonSensible = false
+
+ def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
+ val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
+ unit.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg")
+ isNonSensible = true
+ }
+ def nonSensible(pre: String, alwaysEqual: Boolean) =
+ nonSensibleWarning(s"${pre}values of types $typesString", alwaysEqual)
+ def nonSensiblyEq() = nonSensible("", alwaysEqual = true)
+ def nonSensiblyNeq() = nonSensible("", alwaysEqual = false)
+ def nonSensiblyNew() = nonSensibleWarning("a fresh object", alwaysEqual = false)
+
+ def unrelatedMsg = name match {
+ case nme.EQ | nme.eq => "never compare equal"
+ case _ => "always compare unequal"
+ }
+ def unrelatedTypes() = if (!isNonSensible) {
+ val weaselWord = if (isEitherValueClass) "" else " most likely"
+ unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
+ }
+
+ if (nullCount == 2) // null == null
+ nonSensiblyEq()
+ else if (nullCount == 1) {
+ if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
+ nonSensiblyNeq()
+ else if (onTrees( _ exists isNew)) // null == new AnyRef
+ nonSensiblyNew()
+ }
+ else if (isBoolean(receiver)) {
+ if (!isBoolean(actual) && !isMaybeValue(actual)) // true == 5
+ nonSensiblyNeq()
+ }
+ else if (isUnit(receiver)) {
+ if (isUnit(actual)) // () == ()
+ nonSensiblyEq()
+ else if (!isUnit(actual) && !isMaybeValue(actual)) // () == "abc"
+ nonSensiblyNeq()
+ }
+ else if (isNumeric(receiver)) {
+ if (!isNumeric(actual))
+ if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
+ nonSensiblyNeq()
+ }
+ else if (isWarnable && !isCaseEquals) {
+ if (isNew(qual)) // new X == y
+ nonSensiblyNew()
+ else if (isNew(other) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
+ nonSensiblyNew()
+ else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y
+ if (isEitherNullable)
+ nonSensible("non-null ", false)
+ else
+ nonSensiblyNeq()
+ }
+ }
+
+ // warn if one but not the other is a derived value class
+ // this is especially important to enable transitioning from
+ // regular to value classes without silent failures.
+ if (isNonsenseValueClassCompare)
+ unrelatedTypes()
+ // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
+ else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ // better to have lubbed and lost
+ def warnIfLubless(): Unit = {
+ val common = global.lub(List(actual.tpe, receiver.tpe))
+ if (ObjectTpe <:< common)
+ unrelatedTypes()
+ }
+ // warn if actual has a case parent that is not same as receiver's;
+ // if actual is not a case, then warn if no common supertype, as below
+ if (isCaseEquals) {
+ def thisCase = receiver.info.member(nme.equals_).owner
+ actual.info.baseClasses.find(_.isCase) match {
+ case Some(p) if p != thisCase => nonSensible("case class ", false)
+ case None =>
+ // stronger message on (Some(1) == None)
+ //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
+ //else
+ // if a class, it must be super to thisCase (and receiver) since not <: thisCase
+ if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
+ else if (!haveSubclassRelationship) warnIfLubless()
+ case _ =>
+ }
+ }
+ // warn only if they have no common supertype below Object
+ else if (!haveSubclassRelationship) {
+ warnIfLubless()
+ }
+ }
+ }
+ /** Sensibility check examines flavors of equals. */
+ def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ checkSensibleEquals(pos, qual, name, fn.symbol, args.head)
+ case _ =>
+ }
+*/
+
+/* --------------- Overflow -------------------------------------------------
+ *
+
+ def accessFlagsToString(sym: Symbol) = flagsToString(
+ sym getFlag (PRIVATE | PROTECTED),
+ if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
+ )
+
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
+ case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
+ rtp1 <:< rtp2
+ case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
+ rtp1 <:< rtp2
+ case (TypeRef(_, sym, _), _) if sym.isModuleClass =>
+ overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix)
+ case _ =>
+ def classBoundAsSeen(tp: Type) = tp.typeSymbol.classBound.asSeenFrom(prefix, tp.typeSymbol.owner)
+
+ (tp1 <:< tp2) || ( // object override check
+ tp1.typeSymbol.isModuleClass && tp2.typeSymbol.isModuleClass && {
+ val cb1 = classBoundAsSeen(tp1)
+ val cb2 = classBoundAsSeen(tp2)
+ (cb1 <:< cb2) && {
+ log("Allowing %s to override %s because %s <:< %s".format(tp1, tp2, cb1, cb2))
+ true
+ }
+ }
+ )
+ }
+ private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean)(implicit ctx: Context) = tp match {
+ case TypeRef(pre, sym, args) =>
+ tree match {
+ case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
+ // FIXME: reconcile this check with one in resetAttrs
+ case _ => checkUndesiredProperties(sym, tree.pos)
+ }
+ if (sym.isJavaDefined)
+ sym.typeParams foreach (_.cookJavaRawInfo())
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
+ case _ =>
+ }
+
+ private def checkTypeRefBounds(tp: Type, tree: Tree) = {
+ var skipBounds = false
+ tp match {
+ case AnnotatedType(ann :: Nil, underlying) if ann.symbol == UncheckedBoundsClass =>
+ skipBounds = true
+ underlying
+ case TypeRef(pre, sym, args) =>
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
+ tp
+ case _ =>
+ tp
+ }
+ }
+
+ private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach { tp =>
+ checkTypeRef(tp, tree, skipBounds = false)
+ checkTypeRefBounds(tp, tree)
+ }
+ private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
+
+ private def applyRefchecksToAnnotations(tree: Tree)(implicit ctx: Context): Unit = {
+ def applyChecks(annots: List[Annotation]) = {
+ checkAnnotations(annots map (_.atp), tree)
+ transformTrees(annots flatMap (_.args))
+ }
+
+ tree match {
+ case m: MemberDef =>
+ val sym = m.symbol
+ applyChecks(sym.annotations)
+ // validate implicitNotFoundMessage
+ analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
+ unit.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
+ }
+
+ case tpt@TypeTree() =>
+ if (tpt.original != null) {
+ tpt.original foreach {
+ case dc@TypeTreeWithDeferredRefCheck() =>
+ applyRefchecksToAnnotations(dc.check()) // #2416
+ case _ =>
+ }
+ }
+
+ doTypeTraversal(tree) {
+ case tp @ AnnotatedType(annots, _) =>
+ applyChecks(annots)
+ case tp =>
+ }
+ case _ =>
+ }
+ }
+
+ private def transformCaseApply(tree: Tree, ifNot: => Unit) = {
+ val sym = tree.symbol
+
+ def isClassTypeAccessible(tree: Tree): Boolean = tree match {
+ case TypeApply(fun, targs) =>
+ isClassTypeAccessible(fun)
+ case Select(module, apply) =>
+ ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`;
+ // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`.
+ treeInfo.isQualifierSafeToElide(module) &&
+ // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case,
+ // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+ module.symbol.companionClass.isClass
+ )
+ }
+
+ val doTransform =
+ sym.isRealMethod &&
+ sym.isCase &&
+ sym.name == nme.apply &&
+ isClassTypeAccessible(tree)
+
+ if (doTransform) {
+ tree foreach {
+ case i@Ident(_) =>
+ enterReference(i.pos, i.symbol) // SI-5390 need to `enterReference` for `a` in `a.B()`
+ case _ =>
+ }
+ toConstructor(tree.pos, tree.tpe)
+ }
+ else {
+ ifNot
+ tree
+ }
+ }
+
+ private def transformApply(tree: Apply): Tree = tree match {
+ case Apply(
+ Select(qual, nme.filter | nme.withFilter),
+ List(Function(
+ List(ValDef(_, pname, tpt, _)),
+ Match(_, CaseDef(pat1, _, _) :: _))))
+ if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) &&
+ isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) =>
+
+ transform(qual)
+
+ case Apply(fn, args) =>
+ // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability
+ // analyses in the pattern matcher
+ if (!inPattern) {
+ checkImplicitViewOptionApply(tree.pos, fn, args)
+ checkSensible(tree.pos, fn, args)
+ }
+ currentApplication = tree
+ tree
+ }
+ private def transformSelect(tree: Select): Tree = {
+ val Select(qual, _) = tree
+ val sym = tree.symbol
+
+ checkUndesiredProperties(sym, tree.pos)
+ checkDelayedInitSelect(qual, sym, tree.pos)
+
+ if (!sym.exists)
+ devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
+ else if (sym.isLocalToThis)
+ varianceValidator.checkForEscape(sym, currentClass)
+
+ def checkSuper(mix: Name) =
+ // term should have been eliminated by super accessors
+ assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY), (qual.symbol, sym, mix))
+
+ transformCaseApply(tree,
+ qual match {
+ case Super(_, mix) => checkSuper(mix)
+ case _ =>
+ }
+ )
+ }
+ private def transformIf(tree: If): Tree = {
+ val If(cond, thenpart, elsepart) = tree
+ def unitIfEmpty(t: Tree): Tree =
+ if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t
+
+ cond.tpe match {
+ case ConstantType(value) =>
+ val res = if (value.booleanValue) thenpart else elsepart
+ unitIfEmpty(res)
+ case _ => tree
+ }
+ }
+
+ // Warning about nullary methods returning Unit. TODO: move to lint
+ private def checkNullaryMethodReturnType(sym: Symbol) = sym.tpe match {
+ case NullaryMethodType(restpe) if restpe.typeSymbol == UnitClass =>
+ // this may be the implementation of e.g. a generic method being parameterized
+ // on Unit, in which case we had better let it slide.
+ val isOk = (
+ sym.isGetter
+ || (sym.name containsName nme.DEFAULT_GETTER_STRING)
+ || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
+ )
+ if (!isOk)
+ unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
+ case _ => ()
+ }
+
+ /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
+ def toConstructor(pos: Position, tpe: Type)(implicit ctx: Context): Tree = {
+ val rtpe = tpe.finalResultType
+ assert(rtpe.typeSymbol.is(Case), tpe)
+ New(rtpe).withPos(pos).select(rtpe.typeSymbol.primaryConstructor)
+ }
+ private def isIrrefutable(pat: Tree, seltpe: Type): Boolean = pat match {
+ case Apply(_, args) =>
+ val clazz = pat.tpe.typeSymbol
+ clazz == seltpe.typeSymbol &&
+ clazz.isCaseClass &&
+ (args corresponds clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes)(isIrrefutable)
+ case Typed(pat, tpt) =>
+ seltpe <:< tpt.tpe
+ case Ident(tpnme.WILDCARD) =>
+ true
+ case Bind(_, pat) =>
+ isIrrefutable(pat, seltpe)
+ case _ =>
+ false
+ }
+ private def checkDelayedInitSelect(qual: Tree, sym: Symbol, pos: Position) = {
+ def isLikelyUninitialized = (
+ (sym.owner isSubClass DelayedInitClass)
+ && !qual.tpe.isInstanceOf[ThisType]
+ && sym.accessedOrSelf.isVal
+ )
+ if (settings.lint.value && isLikelyUninitialized)
+ unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
+ }
+ private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
+ (otherSym != NoSymbol)
+ && !otherSym.isProtected
+ && !otherSym.isTypeParameterOrSkolem
+ && !otherSym.isExistentiallyBound
+ && (otherSym isLessAccessibleThan memberSym)
+ && (otherSym isLessAccessibleThan memberSym.enclClass)
+ )
+ private def lessAccessibleSymsInType(other: Type, memberSym: Symbol): List[Symbol] = {
+ val extras = other match {
+ case TypeRef(pre, _, args) =>
+ // checking the prefix here gives us spurious errors on e.g. a private[process]
+ // object which contains a type alias, which normalizes to a visible type.
+ args filterNot (_ eq NoPrefix) flatMap (tp => lessAccessibleSymsInType(tp, memberSym))
+ case _ =>
+ Nil
+ }
+ if (lessAccessible(other.typeSymbol, memberSym)) other.typeSymbol :: extras
+ else extras
+ }
+ private def warnLessAccessible(otherSym: Symbol, memberSym: Symbol) {
+ val comparison = accessFlagsToString(memberSym) match {
+ case "" => ""
+ case acc => " is " + acc + " but"
+ }
+ val cannot =
+ if (memberSym.isDeferred) "may be unable to provide a concrete implementation of"
+ else "may be unable to override"
+
+ unit.warning(memberSym.pos,
+ "%s%s references %s %s.".format(
+ memberSym.fullLocationString, comparison,
+ accessFlagsToString(otherSym), otherSym
+ ) + "\nClasses which cannot access %s %s %s.".format(
+ otherSym.decodedName, cannot, memberSym.decodedName)
+ )
+ }
+
+ /** Warn about situations where a method signature will include a type which
+ * has more restrictive access than the method itself.
+ */
+ private def checkAccessibilityOfReferencedTypes(tree: Tree) {
+ val member = tree.symbol
+
+ def checkAccessibilityOfType(tpe: Type) {
+ val inaccessible = lessAccessibleSymsInType(tpe, member)
+ // if the unnormalized type is accessible, that's good enough
+ if (inaccessible.isEmpty) ()
+ // or if the normalized type is, that's good too
+ else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) ()
+ // otherwise warn about the inaccessible syms in the unnormalized type
+ else inaccessible foreach (sym => warnLessAccessible(sym, member))
+ }
+
+ // types of the value parameters
+ mapParamss(member)(p => checkAccessibilityOfType(p.tpe))
+ // upper bounds of type parameters
+ member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
+ }
+
+ private def checkByNameRightAssociativeDef(tree: DefDef) {
+ tree match {
+ case DefDef(_, name, _, params :: _, _, _) =>
+ if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
+ unit.warning(tree.pos,
+ "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.")
+ case _ =>
+ }
+ }
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = {
+ //val savedLocalTyper = localTyper
+ try {
+ val sym = tree.symbol
+ checkOverloadedRestrictions(ctx.owner)
+ checkAllOverrides(ctx.owner)
+ checkAnyValSubclass(ctx.owner)
+ if (ctx.owner.isDerivedValueClass)
+ ctx.owner.primaryConstructor.makeNotPrivateAfter(NoSymbol, thisTransformer) // SI-6601, must be done *after* pickler!
+ tree
+
+
+ // Apply RefChecks to annotations. Makes sure the annotations conform to
+ // type bounds (bug #935), issues deprecation warnings for symbols used
+ // inside annotations.
+ // applyRefchecksToAnnotations(tree) ???
+ var result: Tree = tree match {
+ case tree: ValOrDefDef =>
+ // move to lint:
+ // if (settings.warnNullaryUnit)
+ // checkNullaryMethodReturnType(sym)
+ // if (settings.warnInaccessible) {
+ // if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
+ // checkAccessibilityOfReferencedTypes(tree)
+ // }
+ // tree match {
+ // case dd: DefDef => checkByNameRightAssociativeDef(dd)
+ // case _ =>
+ // }
+ tree
+
+ case Template(constr, parents, self, body) =>
+ // localTyper = localTyper.atOwner(tree, currentOwner)
+ checkOverloadedRestrictions(ctx.owner)
+ checkAllOverrides(ctx.owner)
+ checkAnyValSubclass(ctx.owner)
+ if (ctx.owner.isDerivedValueClass)
+ ctx.owner.primaryConstructor.makeNotPrivateAfter(NoSymbol, thisTransformer) // SI-6601, must be done *after* pickler!
+ tree
+
+ case tpt: TypeTree =>
+ transform(tpt.original)
+ tree
+
+ case TypeApply(fn, args) =>
+ checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe))
+ transformCaseApply(tree, ())
+
+ case x @ Apply(_, _) =>
+ transformApply(x)
+
+ case x @ If(_, _, _) =>
+ transformIf(x)
+
+ case New(tpt) =>
+ enterReference(tree.pos, tpt.tpe.typeSymbol)
+ tree
+
+ case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
+ unit.error(tree.pos, "no `: _*' annotation allowed here\n" +
+ "(such annotations are only allowed in arguments to *-parameters)")
+ tree
+
+ case Ident(name) =>
+ checkUndesiredProperties(sym, tree.pos)
+ transformCaseApply(tree,
+ if (name != nme.WILDCARD && name != tpnme.WILDCARD_STAR) {
+ assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug
+ enterReference(tree.pos, sym)
+ }
+ )
+
+ case x @ Select(_, _) =>
+ transformSelect(x)
+
+ case UnApply(fun, args) =>
+ transform(fun) // just make sure we enterReference for unapply symbols, note that super.transform(tree) would not transform(fun)
+ // transformTrees(args) // TODO: is this necessary? could there be forward references in the args??
+ // probably not, until we allow parameterised extractors
+ tree
+
+
+ case _ => tree
+ }
+
+ // skip refchecks in patterns....
+ result = result match {
+ case CaseDef(pat, guard, body) =>
+ val pat1 = savingInPattern {
+ inPattern = true
+ transform(pat)
+ }
+ treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ savingInPattern {
+ inPattern = true
+ deriveLabelDef(result)(transform)
+ }
+ case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) =>
+ savingInPattern {
+ // SI-7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals
+ // that we are in the user-supplied code in the case body.
+ //
+ // Relies on the translation of:
+ // (null: Any) match { case x: List[_] => x; x.reverse; case _ => }'
+ // to:
+ // <synthetic> val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]);
+ // matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply.
+ inPattern = false
+ super.transform(result)
+ }
+ case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
+ case _ =>
+ super.transform(result)
+ }
+ result match {
+ case ClassDef(_, _, _, _)
+ | TypeDef(_, _, _, _) =>
+ if (result.symbol.isLocalToBlock || result.symbol.isTopLevel)
+ varianceValidator.traverse(result)
+ case tt @ TypeTree() if tt.original != null =>
+ varianceValidator.traverse(tt.original) // See SI-7872
+ case _ =>
+ }
+
+ checkUnexpandedMacro(result)
+
+ result
+ } catch {
+ case ex: TypeError =>
+ if (settings.debug) ex.printStackTrace()
+ unit.error(tree.pos, ex.getMessage())
+ tree
+ } finally {
+ localTyper = savedLocalTyper
+ currentApplication = savedCurrentApplication
+ }
+ }
+*/
+
diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
new file mode 100644
index 000000000..ee2d68278
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -0,0 +1,524 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast._
+import Scopes._, Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._
+import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._, TypeErasure._
+import TypeApplications.AppliedType
+import util.Positions._
+import config.Printers.typr
+import ast.Trees._
+import NameOps._
+import collection.mutable
+import reporting.diagnostic.Message
+import reporting.diagnostic.messages._
+
+trait TypeAssigner {
+ import tpd._
+
+  /** The qualifying class of a this or super with prefix `qual` (which might be empty).
+   *  Reports an error and returns `NoSymbol` if no enclosing class matches.
+   *  @param packageOK  The qualifier may refer to a package.
+   */
+  def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(implicit ctx: Context): Symbol = {
+    // A candidate qualifies if it is a class and either no qualifier was given,
+    // or its name (modulo a module-class suffix) equals the qualifier.
+    def qualifies(sym: Symbol) =
+      sym.isClass && (
+          qual.isEmpty ||
+          sym.name == qual ||
+          sym.is(Module) && sym.name.stripModuleClassSuffix == qual)
+    // Walk outwards through enclosing owners to find the nearest matching class.
+    ctx.outersIterator.map(_.owner).find(qualifies) match {
+      case Some(c) if packageOK || !(c is Package) =>
+        c
+      case _ =>
+        ctx.error(
+          if (qual.isEmpty) tree.show + " can be used only in a class, object, or template"
+          else qual.show + " is not an enclosing class", tree.pos)
+        NoSymbol
+    }
+  }
+
+  /** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid`.
+   *  Approximation steps are:
+   *
+   *   - follow aliases and upper bounds if the original refers to a forbidden symbol
+   *   - widen termrefs that refer to a forbidden symbol
+   *   - replace ClassInfos of forbidden classes by the intersection of their parents, refined by all
+   *     non-private fields, methods, and type members.
+   *   - if the prefix of a class refers to a forbidden symbol, first try to replace the prefix,
+   *     if this is not possible, replace the ClassInfo as above.
+   *   - drop refinements referring to a forbidden symbol.
+   *
+   *  @param symsToAvoid  by-name: only evaluated (once, via the lazy `forbidden` set below)
+   *                      when the map actually needs to inspect a type.
+   */
+  def avoid(tp: Type, symsToAvoid: => List[Symbol])(implicit ctx: Context): Type = {
+    val widenMap = new TypeMap {
+      // Computed at most once, on first use.
+      lazy val forbidden = symsToAvoid.toSet
+      // Does `tp` contain (anywhere inside it) a reference to a forbidden symbol?
+      def toAvoid(tp: Type): Boolean =
+        // TODO: measure the cost of using `existsPart`, and if necessary replace it
+        // by a `TypeAccumulator` where we have set `stopAtStatic = true`.
+        tp existsPart {
+          case tp: NamedType => forbidden contains tp.symbol
+          case tp: ThisType => forbidden contains tp.cls
+          case _ => false
+        }
+      def apply(tp: Type): Type = tp match {
+        case tp: TermRef
+        if toAvoid(tp) && (variance > 0 || tp.info.widenExpr <:< tp) =>
+          // Can happen if `x: y.type`, then `x.type =:= y.type`, hence we can widen `x.type`
+          // to y.type in all contexts, not just covariant ones.
+          apply(tp.info.widenExpr)
+        case tp: TypeRef if toAvoid(tp) =>
+          tp.info match {
+            case TypeAlias(ref) =>
+              // Dealias and retry on the alias target.
+              apply(ref)
+            case info: ClassInfo if variance > 0 =>
+              if (!(forbidden contains tp.symbol)) {
+                // Only the prefix is forbidden: try to heal by rebuilding the
+                // selection with an approximated prefix before giving up on the class.
+                val prefix = apply(tp.prefix)
+                val tp1 = tp.derivedSelect(prefix)
+                if (tp1.typeSymbol.exists)
+                  return tp1
+              }
+              // Replace the class by the intersection of its parents ...
+              val parentType = info.parentsWithArgs.reduceLeft(ctx.typeComparer.andType(_, _))
+              // ... refined by each declaration that is more precise than what is inherited.
+              def addRefinement(parent: Type, decl: Symbol) = {
+                val inherited =
+                  parentType.findMember(decl.name, info.cls.thisType, Private)
+                    .suchThat(decl.matches(_))
+                val inheritedInfo = inherited.info
+                if (inheritedInfo.exists && decl.info <:< inheritedInfo && !(inheritedInfo <:< decl.info)) {
+                  val r = RefinedType(parent, decl.name, decl.info)
+                  typr.println(i"add ref $parent $decl --> " + r)
+                  r
+                }
+                else
+                  parent
+              }
+              val refinableDecls = info.decls.filterNot(
+                sym => sym.is(TypeParamAccessor | Private) || sym.isConstructor)
+              val fullType = (parentType /: refinableDecls)(addRefinement)
+              mapOver(fullType)
+            case TypeBounds(lo, hi) if variance > 0 =>
+              // In covariant position an abstract type may be approximated by its upper bound.
+              apply(hi)
+            case _ =>
+              mapOver(tp)
+          }
+        case tp @ HKApply(tycon, args) if toAvoid(tycon) =>
+          apply(tp.superType)
+        case tp @ AppliedType(tycon, args) if toAvoid(tycon) =>
+          val base = apply(tycon)
+          var args = tp.baseArgInfos(base.typeSymbol)
+          // If the approximated constructor has a different arity, fall back to its parameter bounds.
+          if (base.typeParams.length != args.length)
+            args = base.typeParams.map(_.paramBounds)
+          apply(base.appliedTo(args))
+        case tp @ RefinedType(parent, name, rinfo) if variance > 0 =>
+          val parent1 = apply(tp.parent)
+          val refinedInfo1 = apply(rinfo)
+          if (toAvoid(refinedInfo1)) {
+            // The refinement still leaks a forbidden symbol: drop it (type refinements
+            // become empty bounds, term refinements disappear entirely).
+            typr.println(s"dropping refinement from $tp")
+            if (name.isTypeName) tp.derivedRefinedType(parent1, name, TypeBounds.empty)
+            else parent1
+          } else {
+            tp.derivedRefinedType(parent1, name, refinedInfo1)
+          }
+        case tp: TypeVar if ctx.typerState.constraint.contains(tp) =>
+          // For an uninstantiated type variable, approximate via its current lower bound.
+          val lo = ctx.typerState.constraint.fullLowerBound(tp.origin)
+          val lo1 = avoid(lo, symsToAvoid)
+          if (lo1 ne lo) lo1 else tp
+        case _ =>
+          mapOver(tp)
+      }
+    }
+    widenMap(tp)
+  }
+
+  /** The type of `expr`, with references to term-level local symbols of `bindings`
+   *  approximated away (see `avoid`), so the type does not leak out of their scope.
+   */
+  def avoidingType(expr: Tree, bindings: List[Tree])(implicit ctx: Context): Type =
+    avoid(expr.tpe, localSyms(bindings).filter(_.isTerm))
+
+  /** Ascribe `tree` with the repeated-parameter (`T*`) version of its sequence type. */
+  def seqToRepeated(tree: Tree)(implicit ctx: Context): Tree =
+    Typed(tree, TypeTree(tree.tpe.widen.translateParameterized(defn.SeqClass, defn.RepeatedParamClass)))
+
+  /** A denotation exists really if it exists and does not point to a stale symbol.
+   *  Completion failures signalled via `StaleSymbol` are treated as non-existence.
+   */
+  final def reallyExists(denot: Denotation)(implicit ctx: Context): Boolean = try
+    denot match {
+      case denot: SymDenotation =>
+        // Force completion before checking absence: only a completed
+        // denotation can be reliably tested with `isAbsent`.
+        denot.exists && {
+          denot.ensureCompleted
+          !denot.isAbsent
+        }
+      case denot: SingleDenotation =>
+        // Delegate to the symbol's own denotation, if it has one.
+        val sym = denot.symbol
+        (sym eq NoSymbol) || reallyExists(sym.denot)
+      case _ =>
+        true
+    }
+  catch {
+    case ex: StaleSymbol => false
+  }
+
+  /** If `tpe` is a named type, check that its denotation is accessible in the
+   *  current context. Return the type with those alternatives as denotations
+   *  which are accessible. Reports an error and returns `ErrorType` if nothing
+   *  accessible is found.
+   *
+   *  Also performs the following normalizations on the type `tpe`.
+   *  (1) parameter accessors are always dereferenced.
+   *  (2) if the owner of the denotation is a package object, it is assured
+   *      that the package object shows up as the prefix.
+   */
+  def ensureAccessible(tpe: Type, superAccess: Boolean, pos: Position)(implicit ctx: Context): Type = {
+    // `firstTry` guards the single retry through the shadowed, inherited member.
+    def test(tpe: Type, firstTry: Boolean): Type = tpe match {
+      case tpe: NamedType =>
+        val pre = tpe.prefix
+        val name = tpe.name
+        val d = tpe.denot.accessibleFrom(pre, superAccess)
+        if (!d.exists) {
+          // it could be that we found an inaccessible private member, but there is
+          // an inherited non-private member with the same name and signature.
+          val d2 = pre.nonPrivateMember(name)
+          if (reallyExists(d2) && firstTry)
+            test(tpe.shadowed.withDenot(d2), false)
+          else if (pre.derivesFrom(defn.DynamicClass)) {
+            TryDynamicCallType
+          } else {
+            // Build an informative "cannot be accessed" message naming the alternative(s).
+            val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists)
+            val what = alts match {
+              case Nil =>
+                name.toString
+              case sym :: Nil =>
+                if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated
+              case _ =>
+                em"none of the overloaded alternatives named $name"
+            }
+            val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else ""
+            val whyNot = new StringBuffer
+            // NOTE(review): presumably isAccessibleFrom appends its explanation to
+            // `whyNot` as a side effect — the buffer is interpolated into the error below.
+            alts foreach (_.isAccessibleFrom(pre, superAccess, whyNot))
+            if (!tpe.isError)
+              ctx.error(ex"$what cannot be accessed as a member of $pre$where.$whyNot", pos)
+            ErrorType
+          }
+        }
+        else if (d.symbol is TypeParamAccessor)
+          // Normalization (1): dereference parameter accessors.
+          if (d.info.isAlias)
+            ensureAccessible(d.info.bounds.hi, superAccess, pos)
+          else // It's a named parameter, use the non-symbolic representation to pick up inherited versions as well
+            d.symbol.owner.thisType.select(d.symbol.name)
+        else
+          // Normalization (2): make a package-object prefix explicit.
+          ctx.makePackageObjPrefixExplicit(tpe withDenot d)
+      case _ =>
+        tpe
+    }
+    test(tpe, true)
+  }
+
+  /** The type of a selection with `name` of a tree with type `site`.
+   *  Falls back to dynamic selection for `Dynamic` qualifiers, otherwise
+   *  reports a "not a member" error and returns `ErrorType`.
+   */
+  def selectionType(site: Type, name: Name, pos: Position)(implicit ctx: Context): Type = {
+    val mbr = site.member(name)
+    if (reallyExists(mbr)) site.select(name, mbr)
+    else if (site.derivesFrom(defn.DynamicClass) && !Dynamic.isDynamicMethod(name)) {
+      // Member is missing but the qualifier is a Dynamic: defer to dynamic selection.
+      TryDynamicCallType
+    } else {
+      // Only report on a clean site; errors on erroneous sites would be noise.
+      if (!site.isErroneous) {
+        def kind = if (name.isTypeName) "type" else "value"
+        // NOTE(review): `addendum` is computed but never used in the message
+        // below (NotAMember takes only site/name/kind) — verify intent.
+        def addendum =
+          if (site.derivesFrom(defn.DynamicClass)) "\npossible cause: maybe a wrong Dynamic method signature?"
+          else ""
+        ctx.error(
+          if (name == nme.CONSTRUCTOR) ex"$site does not have a constructor"
+          else NotAMember(site, name, kind),
+          pos)
+      }
+      ErrorType
+    }
+  }
+
+  /** The selection type, which is additionally checked for accessibility.
+   */
+  def accessibleSelectionType(tree: untpd.RefTree, qual1: Tree)(implicit ctx: Context): Type = {
+    // Widen an unstable prefix before computing the member type.
+    val ownType = selectionType(qual1.tpe.widenIfUnstable, tree.name, tree.pos)
+    // Selections through `super` get the stricter super-access check.
+    ensureAccessible(ownType, qual1.isInstanceOf[Super], tree.pos)
+  }
+
+  /** Type assignment method. Each method takes as parameters
+   *   - an untpd.Tree to which it assigns a type,
+   *   - typed child trees it needs to access to compute that type,
+   *   - any further information it needs to access to compute that type.
+   */
+
+  def assignType(tree: untpd.Ident, tp: Type)(implicit ctx: Context) =
+    tree.withType(tp)
+
+  def assignType(tree: untpd.Select, qual: Tree)(implicit ctx: Context): Select = {
+    def qualType = qual.tpe.widen
+    // Element type of the qualifier, which must be a JavaArrayType for the primitive cases below.
+    def arrayElemType = {
+      val JavaArrayType(elemtp) = qualType
+      elemtp
+    }
+    val p = nme.primitive
+    val tp = tree.name match {
+      // Array primitives get method types synthesized from the array's element type.
+      case p.arrayApply => MethodType(defn.IntType :: Nil, arrayElemType)
+      case p.arrayUpdate => MethodType(defn.IntType :: arrayElemType :: Nil, defn.UnitType)
+      case p.arrayLength => MethodType(Nil, defn.IntType)
+
+      // Note that we do not need to handle calls to Array[T]#clone() specially:
+      // The JLS section 10.7 says "The return type of the clone method of an array type
+      // T[] is T[]", but the actual return type at the bytecode level is Object which
+      // is casted to T[] by javac. Since the return type of Array[T]#clone() is Array[T],
+      // this is exactly what Erasure will do.
+
+      case _ => accessibleSelectionType(tree, qual)
+    }
+    tree.withType(tp)
+  }
+
+  def assignType(tree: untpd.New, tpt: Tree)(implicit ctx: Context) =
+    tree.withType(tpt.tpe)
+
+  def assignType(tree: untpd.Literal)(implicit ctx: Context) =
+    tree.withType {
+      val value = tree.const
+      value.tag match {
+        case UnitTag => defn.UnitType
+        case NullTag => defn.NullType
+        // After erasure there are no singleton constant types anymore.
+        case _ => if (ctx.erasedTypes) value.tpe else ConstantType(value)
+      }
+    }
+
+  def assignType(tree: untpd.This)(implicit ctx: Context) = {
+    val cls = qualifyingClass(tree, tree.qual.name, packageOK = false)
+    tree.withType(cls.thisType)
+  }
+
+  def assignType(tree: untpd.Super, qual: Tree, inConstrCall: Boolean, mixinClass: Symbol = NoSymbol)(implicit ctx: Context) = {
+    val mix = tree.mix
+    // The qualifier of a Super is always typed as a ThisType.
+    val qtype @ ThisType(_) = qual.tpe
+    val cls = qtype.cls
+
+    // Resolve an explicit mixin qualifier `super[mix]` to the unique parent with that name.
+    def findMixinSuper(site: Type): Type = site.parents filter (_.name == mix.name) match {
+      case p :: Nil =>
+        p
+      case Nil =>
+        errorType(em"$mix does not name a parent class of $cls", tree.pos)
+      case p :: q :: _ =>
+        errorType("ambiguous parent class qualifier", tree.pos)
+    }
+    val owntype =
+      if (mixinClass.exists) mixinClass.typeRef
+      else if (!mix.isEmpty) findMixinSuper(cls.info)
+      else if (inConstrCall || ctx.erasedTypes) cls.info.firstParent
+      else {
+        // An unqualified super elsewhere sees the intersection of all parents.
+        val ps = cls.classInfo.parentsWithArgs
+        if (ps.isEmpty) defn.AnyType else ps.reduceLeft((x: Type, y: Type) => x & y)
+      }
+    tree.withType(SuperType(cls.thisType, owntype))
+  }
+
+  def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(implicit ctx: Context) = {
+    val ownType = fn.tpe.widen match {
+      case fntpe @ MethodType(_, ptypes) =>
+        // `relaxedTyping` phases (after the typer proper) tolerate arity mismatches.
+        if (sameLength(ptypes, args) || ctx.phase.prev.relaxedTyping) fntpe.instantiate(args.tpes)
+        else wrongNumberOfArgs(fn.tpe, "", fntpe.typeParams, args, tree.pos)
+      case t =>
+        errorType(i"${err.exprStr(fn)} does not take parameters", tree.pos)
+    }
+    tree.withType(ownType)
+  }
+
+  /** Type a type application, supporting named type arguments: named arguments are
+   *  substituted into the result type; unnamed (gap) parameters remain abstract in
+   *  a re-indexed, derived PolyType.
+   */
+  def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(implicit ctx: Context) = {
+    val ownType = fn.tpe.widen match {
+      case pt: PolyType =>
+        val paramNames = pt.paramNames
+        if (hasNamedArg(args)) {
+          // Type arguments which are specified by name (immutable after this first loop)
+          val namedArgMap = new mutable.HashMap[Name, Type]
+          for (NamedArg(name, arg) <- args)
+            if (namedArgMap.contains(name))
+              ctx.error("duplicate name", arg.pos)
+            else if (!paramNames.contains(name))
+              ctx.error(s"undefined parameter name, required: ${paramNames.mkString(" or ")}", arg.pos)
+            else
+              namedArgMap(name) = arg.tpe
+
+          // Holds indexes of non-named typed arguments in paramNames
+          val gapBuf = new mutable.ListBuffer[Int]
+          def nextPoly(idx: Int) = {
+            val newIndex = gapBuf.length
+            gapBuf += idx
+            // Re-index unassigned type arguments that remain after transformation
+            PolyParam(pt, newIndex)
+          }
+
+          // Type parameters after naming assignment, conserving paramNames order
+          val normArgs: List[Type] = paramNames.zipWithIndex.map { case (pname, idx) =>
+            namedArgMap.getOrElse(pname, nextPoly(idx))
+          }
+
+          // Substitute the (re-indexed) arguments for the original PolyParams.
+          val transform = new TypeMap {
+            def apply(t: Type) = t match {
+              case PolyParam(`pt`, idx) => normArgs(idx)
+              case _ => mapOver(t)
+            }
+          }
+          val resultType1 = transform(pt.resultType)
+          if (gapBuf.isEmpty) resultType1
+          else {
+            // Some parameters were not given: keep them abstract in a derived PolyType.
+            val gaps = gapBuf.toList
+            pt.derivedPolyType(
+              gaps.map(paramNames),
+              gaps.map(idx => transform(pt.paramBounds(idx)).bounds),
+              resultType1)
+          }
+        }
+        else {
+          // Positional arguments: plain instantiation (arity-checked unless relaxedTyping).
+          val argTypes = args.tpes
+          if (sameLength(argTypes, paramNames) || ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
+          else wrongNumberOfArgs(fn.tpe, "type", pt.typeParams, args, tree.pos)
+        }
+      case _ =>
+        errorType(i"${err.exprStr(fn)} does not take type parameters", tree.pos)
+    }
+
+    tree.withType(ownType)
+  }
+
+  // Simple one-to-one type assignments: the tree's type is derived directly
+  // from its typed children (or a fixed type from Definitions).
+
+  def assignType(tree: untpd.Typed, tpt: Tree)(implicit ctx: Context) =
+    tree.withType(tpt.tpe)
+
+  def assignType(tree: untpd.NamedArg, arg: Tree)(implicit ctx: Context) =
+    tree.withType(arg.tpe)
+
+  def assignType(tree: untpd.Assign)(implicit ctx: Context) =
+    tree.withType(defn.UnitType)
+
+  def assignType(tree: untpd.Block, stats: List[Tree], expr: Tree)(implicit ctx: Context) =
+    // Block-local symbols must not escape in the block's type.
+    tree.withType(avoidingType(expr, stats))
+
+  def assignType(tree: untpd.Inlined, bindings: List[Tree], expansion: Tree)(implicit ctx: Context) =
+    tree.withType(avoidingType(expansion, bindings))
+
+  def assignType(tree: untpd.If, thenp: Tree, elsep: Tree)(implicit ctx: Context) =
+    tree.withType(thenp.tpe | elsep.tpe)
+
+  def assignType(tree: untpd.Closure, meth: Tree, target: Tree)(implicit ctx: Context) =
+    tree.withType(
+      if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length)
+      else target.tpe)
+
+  def assignType(tree: untpd.CaseDef, body: Tree)(implicit ctx: Context) =
+    tree.withType(body.tpe)
+
+  def assignType(tree: untpd.Match, cases: List[CaseDef])(implicit ctx: Context) =
+    tree.withType(ctx.typeComparer.lub(cases.tpes))
+
+  def assignType(tree: untpd.Return)(implicit ctx: Context) =
+    tree.withType(defn.NothingType)
+
+  def assignType(tree: untpd.Try, expr: Tree, cases: List[CaseDef])(implicit ctx: Context) =
+    if (cases.isEmpty) tree.withType(expr.tpe)
+    else tree.withType(ctx.typeComparer.lub(expr.tpe :: cases.tpes))
+
+  def assignType(tree: untpd.SeqLiteral, elems: List[Tree], elemtpt: Tree)(implicit ctx: Context) = {
+    val ownType = tree match {
+      case tree: untpd.JavaSeqLiteral => defn.ArrayOf(elemtpt.tpe)
+      // After erasure, Seq is no longer applied to a type argument.
+      case _ => if (ctx.erasedTypes) defn.SeqType else defn.SeqType.appliedTo(elemtpt.tpe)
+    }
+    tree.withType(ownType)
+  }
+
+  def assignType(tree: untpd.SingletonTypeTree, ref: Tree)(implicit ctx: Context) =
+    tree.withType(ref.tpe)
+
+  def assignType(tree: untpd.AndTypeTree, left: Tree, right: Tree)(implicit ctx: Context) =
+    tree.withType(left.tpe & right.tpe)
+
+  def assignType(tree: untpd.OrTypeTree, left: Tree, right: Tree)(implicit ctx: Context) =
+    tree.withType(left.tpe | right.tpe)
+
+  /** Assign type of RefinedType.
+   *  Refinements are typed as if they were members of refinement class `refineCls`.
+   */
+  def assignType(tree: untpd.RefinedTypeTree, parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(implicit ctx: Context) = {
+    def addRefinement(parent: Type, refinement: Tree): Type = {
+      val rsym = refinement.symbol
+      // Accessors contribute their result type, not their (nullary method) info.
+      val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info
+      RefinedType(parent, rsym.name, rinfo)
+    }
+    val refined = (parent.tpe /: refinements)(addRefinement)
+    // Close over self-references: occurrences of `refineCls.this` become the
+    // recursive self-reference of the resulting RecType.
+    tree.withType(RecType.closeOver(rt => refined.substThis(refineCls, RecThis(rt))))
+  }
+
+  def assignType(tree: untpd.AppliedTypeTree, tycon: Tree, args: List[Tree])(implicit ctx: Context) = {
+    val tparams = tycon.tpe.typeParams
+    lazy val ntparams = tycon.tpe.namedTypeParams
+    // Fold one named argument into `tycon` as a refinement.
+    def refineNamed(tycon: Type, arg: Tree) = arg match {
+      case ast.Trees.NamedArg(name, argtpt) =>
+        // Dotty deviation: importing ast.Trees._ and matching on NamedArg gives a cyclic ref error
+        val tparam = tparams.find(_.paramName == name) match {
+          case Some(tparam) => tparam
+          // `none` is a lowercase variable pattern here, covering the None case.
+          case none => ntparams.find(_.name == name).getOrElse(NoSymbol)
+        }
+        if (tparam.isTypeParam) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam))
+        else errorType(i"$tycon does not have a parameter or abstract type member named $name", arg.pos)
+      case _ =>
+        errorType(s"named and positional type arguments may not be mixed", arg.pos)
+    }
+    val ownType =
+      if (hasNamedArg(args)) (tycon.tpe /: args)(refineNamed)
+      else if (sameLength(tparams, args)) tycon.tpe.appliedTo(args.tpes)
+      else wrongNumberOfArgs(tycon.tpe, "type", tparams, args, tree.pos)
+    tree.withType(ownType)
+  }
+
+  def assignType(tree: untpd.PolyTypeTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) =
+    // A type lambda abstracting the body over its type-parameter symbols.
+    tree.withType(body.tpe.LambdaAbstract(tparamDefs.map(_.symbol)))
+
+  def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) =
+    tree.withType(ExprType(result.tpe))
+
+  def assignType(tree: untpd.TypeBoundsTree, lo: Tree, hi: Tree)(implicit ctx: Context) =
+    // `lo eq hi` marks an alias (both sides are the same tree), not ordinary bounds.
+    tree.withType(if (lo eq hi) TypeAlias(lo.tpe) else TypeBounds(lo.tpe, hi.tpe))
+
+  def assignType(tree: untpd.Bind, sym: Symbol)(implicit ctx: Context) =
+    tree.withType(NamedType.withFixedSym(NoPrefix, sym))
+
+  def assignType(tree: untpd.Alternative, trees: List[Tree])(implicit ctx: Context) =
+    tree.withType(ctx.typeComparer.lub(trees.tpes))
+
+  def assignType(tree: untpd.UnApply, proto: Type)(implicit ctx: Context) =
+    tree.withType(proto)
+
+  // Definition trees reference their own symbol; see symbolicIfNeeded for when
+  // the reference has to be symbolic.
+
+  def assignType(tree: untpd.ValDef, sym: Symbol)(implicit ctx: Context) =
+    tree.withType(if (sym.exists) assertExists(symbolicIfNeeded(sym).orElse(sym.valRef)) else NoType)
+
+  def assignType(tree: untpd.DefDef, sym: Symbol)(implicit ctx: Context) =
+    tree.withType(symbolicIfNeeded(sym).orElse(sym.termRefWithSig))
+
+  def assignType(tree: untpd.TypeDef, sym: Symbol)(implicit ctx: Context) =
+    tree.withType(symbolicIfNeeded(sym).orElse(sym.typeRef))
+
+  /** A symbolic reference to `sym` if one is required for correctness, `NoType` otherwise. */
+  private def symbolicIfNeeded(sym: Symbol)(implicit ctx: Context) = {
+    val owner = sym.owner
+    owner.infoOrCompleter match {
+      case info: ClassInfo if info.givenSelfType.exists =>
+        // In that case a simple typeRef/termRefWithSig could return a member of
+        // the self type, not the symbol itself. To avoid this, we make the reference
+        // symbolic. In general it seems to be faster to keep the non-symbolic
+        // reference, since there is less pressure on the uniqueness tables that way
+        // and less work to update all the different references. That's why symbolic references
+        // are only used if necessary.
+        NamedType.withFixedSym(owner.thisType, sym)
+      case _ => NoType
+    }
+  }
+
+  /** Guard against assigning `NoType` where a real type is required. */
+  def assertExists(tp: Type) = { assert(tp != NoType); tp }
+
+  def assignType(tree: untpd.Import, sym: Symbol)(implicit ctx: Context) =
+    tree.withType(sym.nonMemberTermRef)
+
+  def assignType(tree: untpd.Annotated, arg: Tree, annot: Tree)(implicit ctx: Context) =
+    tree.withType(AnnotatedType(arg.tpe.widen, Annotation(annot)))
+
+  def assignType(tree: untpd.PackageDef, pid: Tree)(implicit ctx: Context) =
+    tree.withType(pid.symbol.valRef)
+}
+
+/** A module-level TypeAssigner instance, usable without an enclosing Typer. */
+object TypeAssigner extends TypeAssigner
+
diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala
new file mode 100644
index 000000000..64936e106
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -0,0 +1,1952 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import ast._
+import Trees._
+import Constants._
+import StdNames._
+import Scopes._
+import Denotations._
+import ProtoTypes._
+import Contexts._
+import Comments._
+import Symbols._
+import Types._
+import SymDenotations._
+import Annotations._
+import Names._
+import NameOps._
+import Flags._
+import Decorators._
+import ErrorReporting._
+import Checking._
+import Inferencing._
+import EtaExpansion.etaExpand
+import dotty.tools.dotc.transform.Erasure.Boxing
+import util.Positions._
+import util.common._
+import util.SourcePosition
+import collection.mutable
+import annotation.tailrec
+import Implicits._
+import util.Stats.{track, record}
+import config.Printers.{typr, gadts}
+import rewrite.Rewrites.patch
+import NavigateAST._
+import transform.SymUtils._
+import language.implicitConversions
+import printing.SyntaxHighlighting._
+
+object Typer {
+
+  /** The precedence of bindings which determines which of several bindings will be
+   *  accessed by an Ident. A larger numeric value means higher precedence.
+   */
+  object BindingPrec {
+    val definition = 4
+    val namedImport = 3
+    val wildImport = 2
+    val packageClause = 1
+    val nothingBound = 0
+    // Is `prec` one of the two import precedences?
+    def isImportPrec(prec: Int) = prec == namedImport || prec == wildImport
+  }
+
+  /** Assert tree has a position, unless it is empty or a typed splice */
+  def assertPositioned(tree: untpd.Tree)(implicit ctx: Context) =
+    if (!tree.isEmpty && !tree.isInstanceOf[untpd.TypedSplice] && ctx.typerState.isGlobalCommittable)
+      assert(tree.pos.exists, s"position not set for $tree # ${tree.uniqueId}")
+}
+
+class Typer extends Namer with TypeAssigner with Applications with Implicits with Dynamic with Checking with Docstrings {
+
+ import Typer._
+ import tpd.{cpy => _, _}
+ import untpd.cpy
+ import Dynamic.isDynamicMethod
+ import reporting.diagnostic.Message
+ import reporting.diagnostic.messages._
+
+  /** A temporary data item valid for a single typed ident:
+   *  The set of all root import symbols that have been
+   *  encountered as a qualifier of an import so far.
+   *  Note: It would be more proper to move importedFromRoot into typedIdent.
+   *  We should check that this has no performance degradation, however.
+   */
+  private var importedFromRoot: Set[Symbol] = Set()
+
+  /** Temporary data item for single call to typed ident:
+   *  This symbol would be found under Scala2 mode, but is not
+   *  in dotty (because dotty conforms to spec section 2
+   *  w.r.t. package member resolution but scalac does not).
+   */
+  private var foundUnderScala2: Type = NoType
+
+  /** A fresh Typer of the same kind as `this`. */
+  def newLikeThis: Typer = new Typer
+
+  /** Attribute an identifier consisting of a simple name or wildcard
+   *
+   *  @param tree  The tree representing the identifier.
+   *  Transformations: (1) Prefix class members with this.
+   *                   (2) Change imported symbols to selections.
+   *                   (3) Change pattern Idents id (but not wildcards) to id @ _
+   */
+  def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree = track("typedIdent") {
+    val refctx = ctx
+    val name = tree.name
+    // In a package clause name, imports never contribute bindings.
+    val noImports = ctx.mode.is(Mode.InPackageClauseName)
+
+    /** Method is necessary because error messages need to bind
+     *  to typedIdent's context which is lost in nested calls to findRef
+     */
+    def error(msg: => Message, pos: Position) = ctx.error(msg, pos)
+
+    /** Is this import a root import that has been shadowed by an explicit
+     *  import in the same program?
+     */
+    def isDisabled(imp: ImportInfo, site: Type): Boolean = {
+      if (imp.isRootImport && (importedFromRoot contains site.termSymbol)) return true
+      // Side effect: record that this import hides a root import.
+      if (imp.hiddenRoot.exists) importedFromRoot += imp.hiddenRoot
+      false
+    }
+
+    /** Does this identifier appear as a constructor of a pattern? */
+    def isPatternConstr =
+      if (ctx.mode.isExpr && (ctx.outer.mode is Mode.Pattern))
+        ctx.outer.tree match {
+          case Apply(`tree`, _) => true
+          case _ => false
+        }
+      else false
+
+    /** A symbol qualifies if it really exists. In addition,
+     *  if we are in a constructor of a pattern, we ignore all definitions
+     *  which are methods and not accessors (note: if we don't do that
+     *  case x :: xs in class List would return the :: method).
+     */
+    def qualifies(denot: Denotation): Boolean =
+      reallyExists(denot) && !(
+        pt.isInstanceOf[UnapplySelectionProto] &&
+        (denot.symbol is (Method, butNot = Accessor)))
+
+    /** Find the denotation of enclosing `name` in given context `ctx`.
+     *  @param previous  A denotation that was found in a more deeply nested scope,
+     *                   or else `NoDenotation` if nothing was found yet.
+     *  @param prevPrec  The binding precedence of the previous denotation,
+     *                   or else `nothingBound` if nothing was found yet.
+     *  @param prevCtx   The context of the previous denotation,
+     *                   or else `NoContext` if nothing was found yet.
+     */
+    def findRef(previous: Type, prevPrec: Int, prevCtx: Context)(implicit ctx: Context): Type = {
+      import BindingPrec._
+
+      /** A string which explains how something was bound; Depending on `prec` this is either
+       *      imported by <tree>
+       *  or  defined in <symbol>
+       */
+      def bindingString(prec: Int, whereFound: Context, qualifier: String = "") =
+        if (prec == wildImport || prec == namedImport) {
+          ex"""imported$qualifier by ${hl"${whereFound.importInfo.toString}"}"""
+        } else
+          ex"""defined$qualifier in ${hl"${whereFound.owner.toString}"}"""
+
+      /** Check that any previously found result from an inner context
+       *  does properly shadow the new one from an outer context.
+       *  @param found     The newly found result
+       *  @param newPrec   Its precedence
+       *  @param scala2pkg Special mode where we check members of the same package, but defined
+       *                   in different compilation units under Scala2. If set, and the
+       *                   previous and new contexts do not have the same scope, we select
+       *                   the previous (inner) definition. This models what scalac does.
+       */
+      def checkNewOrShadowed(found: Type, newPrec: Int, scala2pkg: Boolean = false)(implicit ctx: Context): Type =
+        if (!previous.exists || ctx.typeComparer.isSameRef(previous, found)) found
+        else if ((prevCtx.scope eq ctx.scope) &&
+                 (newPrec == definition ||
+                  newPrec == namedImport && prevPrec == wildImport)) {
+          // special cases: definitions beat imports, and named imports beat
+          // wildcard imports, provided both are in contexts with same scope
+          found
+        }
+        else {
+          if (!scala2pkg && !previous.isError && !found.isError) {
+            error(
+              ex"""|reference to `$name` is ambiguous
+                   |it is both ${bindingString(newPrec, ctx, "")}
+                   |and ${bindingString(prevPrec, prevCtx, " subsequently")}""",
+              tree.pos)
+          }
+          previous
+        }
+
+      /** The type representing a named import with enclosing name when imported
+       *  from given `site` and `selectors`.
+       */
+      def namedImportRef(site: Type, selectors: List[untpd.Tree])(implicit ctx: Context): Type = {
+        // Check that `found` is not imported a second time by a later selector.
+        def checkUnambiguous(found: Type) = {
+          val other = namedImportRef(site, selectors.tail)
+          if (other.exists && found.exists && (found != other))
+            error(em"reference to `$name` is ambiguous; it is imported twice in ${ctx.tree}",
+              tree.pos)
+          found
+        }
+        // Capitalized so it acts as a stable-identifier pattern in the matches below.
+        val Name = name.toTermName.decode
+        selectors match {
+          case selector :: rest =>
+            selector match {
+              case Thicket(fromId :: Ident(Name) :: _) =>
+                // Renaming import `from => Name`: resolve under the original name.
+                val Ident(from) = fromId
+                val selName = if (name.isTypeName) from.toTypeName else from
+                // Pass refctx so that any errors are reported in the context of the
+                // reference instead of the context of the import.
+                checkUnambiguous(selectionType(site, selName, tree.pos)(refctx))
+              case Ident(Name) =>
+                checkUnambiguous(selectionType(site, name, tree.pos)(refctx))
+              case _ =>
+                namedImportRef(site, rest)
+            }
+          case nil =>
+            // `nil` is a lowercase variable pattern covering the empty-list case.
+            NoType
+        }
+      }
+
+      /** The type representing a wildcard import with enclosing name when imported
+       *  from given import info
+       */
+      def wildImportRef(imp: ImportInfo)(implicit ctx: Context): Type = {
+        if (imp.isWildcardImport) {
+          val pre = imp.site
+          if (!isDisabled(imp, pre) && !(imp.excluded contains name.toTermName) && name != nme.CONSTRUCTOR) {
+            val denot = pre.member(name).accessibleFrom(pre)(refctx)
+            if (reallyExists(denot)) return pre.select(name, denot)
+          }
+        }
+        NoType
+      }
+
+      /** Is (some alternative of) the given predenotation `denot`
+       *  defined in current compilation unit?
+       */
+      def isDefinedInCurrentUnit(denot: Denotation)(implicit ctx: Context): Boolean = denot match {
+        case MultiDenotation(d1, d2) => isDefinedInCurrentUnit(d1) || isDefinedInCurrentUnit(d2)
+        case denot: SingleDenotation => denot.symbol.sourceFile == ctx.source.file
+      }
+
+      /** Is `denot` the denotation of a self symbol? */
+      def isSelfDenot(denot: Denotation)(implicit ctx: Context) = denot match {
+        case denot: SymDenotation => denot is SelfName
+        case _ => false
+      }
+
+      /** Would import of kind `prec` be not shadowed by a nested higher-precedence definition? */
+      def isPossibleImport(prec: Int)(implicit ctx: Context) =
+        !noImports &&
+        (prevPrec < prec || prevPrec == prec && (prevCtx.scope eq ctx.scope))
+
+      // Walk outwards, context by context, looking for a binding of `name`.
+      @tailrec def loop(implicit ctx: Context): Type = {
+        if (ctx.scope == null) previous
+        else {
+          val outer = ctx.outer
+          var result: Type = NoType
+
+          // find definition
+          if ((ctx.scope ne outer.scope) || (ctx.owner ne outer.owner)) {
+            val defDenot = ctx.denotNamed(name)
+            if (qualifies(defDenot)) {
+              val curOwner = ctx.owner
+              val found =
+                if (isSelfDenot(defDenot)) curOwner.enclosingClass.thisType
+                else curOwner.thisType.select(name, defDenot)
+              if (!(curOwner is Package) || isDefinedInCurrentUnit(defDenot))
+                result = checkNewOrShadowed(found, definition) // no need to go further out, we found highest prec entry
+              else {
+                if (ctx.scala2Mode && !foundUnderScala2.exists)
+                  foundUnderScala2 = checkNewOrShadowed(found, definition, scala2pkg = true)
+                if (defDenot.symbol is Package)
+                  result = checkNewOrShadowed(previous orElse found, packageClause)
+                else if (prevPrec < packageClause)
+                  result = findRef(found, packageClause, ctx)(outer)
+              }
+            }
+          }
+
+          if (result.exists) result
+          else { // find import
+            val curImport = ctx.importInfo
+            if (ctx.owner.is(Package) && curImport != null && curImport.isRootImport && previous.exists)
+              previous // no more conflicts possible in this case
+            else if (isPossibleImport(namedImport) && (curImport ne outer.importInfo) && !curImport.sym.isCompleting) {
+              val namedImp = namedImportRef(curImport.site, curImport.selectors)
+              if (namedImp.exists)
+                findRef(checkNewOrShadowed(namedImp, namedImport), namedImport, ctx)(outer)
+              else if (isPossibleImport(wildImport)) {
+                val wildImp = wildImportRef(curImport)
+                if (wildImp.exists)
+                  findRef(checkNewOrShadowed(wildImp, wildImport), wildImport, ctx)(outer)
+                else loop(outer)
+              }
+              else loop(outer)
+            }
+            else loop(outer)
+          }
+        }
+      }
+
+      loop
+    }
+
+    // begin typedIdent
+    def kind = if (name.isTermName) "" else "type "
+    typr.println(s"typed ident $kind$name in ${ctx.owner}")
+    if (ctx.mode is Mode.Pattern) {
+      if (name == nme.WILDCARD)
+        return tree.withType(pt)
+      if (isVarPattern(tree) && name.isTermName)
+        return typed(desugar.patternVar(tree), pt)
+    }
+
+
+    // NOTE(review): the two fields are saved and reset here, presumably to
+    // shield this resolution from reentrant typedIdent calls — confirm.
+    val rawType = {
+      val saved1 = importedFromRoot
+      val saved2 = foundUnderScala2
+      importedFromRoot = Set.empty
+      foundUnderScala2 = NoType
+      try {
+        var found = findRef(NoType, BindingPrec.nothingBound, NoContext)
+        if (foundUnderScala2.exists && !(foundUnderScala2 =:= found)) {
+          ctx.migrationWarning(
+            ex"""Name resolution will change.
+                | currently selected                     : $foundUnderScala2
+                | in the future, without -language:Scala2: $found""", tree.pos)
+          found = foundUnderScala2
+        }
+        found
+      }
+      finally {
+        importedFromRoot = saved1
+        foundUnderScala2 = saved2
+      }
+    }
+
+    val ownType =
+      if (rawType.exists)
+        ensureAccessible(rawType, superAccess = false, tree.pos)
+      else {
+        error(new MissingIdent(tree, kind, name.show), tree.pos)
+        ErrorType
+      }
+
+    // Materialize the prefix as a qualified reference unless it can be elided.
+    val tree1 = ownType match {
+      case ownType: NamedType if !prefixIsElidable(ownType) =>
+        ref(ownType).withPos(tree.pos)
+      case _ =>
+        tree.withType(ownType)
+    }
+
+    checkValue(tree1, pt)
+  }
+
+  /** Assign a type to the selection `qual.name`, check it denotes a value with
+   *  respect to `pt`, and heal an unsafe nonvariant instantiation if necessary.
+   */
+  private def typedSelect(tree: untpd.Select, pt: Type, qual: Tree)(implicit ctx: Context): Select =
+    healNonvariant(
+      checkValue(assignType(cpy.Select(tree)(qual, tree.name), qual), pt),
+      pt)
+
+ /** Let `tree = p.n` where `p: T`. If tree's type is an unsafe instantiation
+ * (see TypeOps#asSeenFrom for how this can happen), rewrite the prefix `p`
+ * to `(p: <unknown skolem of type T>)` and try again with the new (stable)
+ * prefix. If the result has another unsafe instantiation, raise an error.
+ */
+ private def healNonvariant[T <: Tree](tree: T, pt: Type)(implicit ctx: Context): T =
+ if (ctx.unsafeNonvariant == ctx.runId && tree.tpe.widen.hasUnsafeNonvariant)
+ tree match {
+ case tree @ Select(qual, _) if !qual.tpe.isStable =>
+ // retry the selection with a skolemized, hence stable, qualifier
+ val alt = typedSelect(tree, pt, Typed(qual, TypeTree(SkolemType(qual.tpe.widen))))
+ typr.println(i"healed type: ${tree.tpe} --> $alt")
+ alt.asInstanceOf[T]
+ case _ =>
+ // qualifier already stable (or not a select): healing cannot help
+ ctx.error(ex"unsafe instantiation of type ${tree.tpe}", tree.pos)
+ tree
+ }
+ else tree
+
+ /** Type a selection `qualifier.name` against expected type `pt`.
+ * Handles, in order: selection from a type qualifier, Java's ambiguous
+ * `A.B` (value vs type) syntax, the `withFilter` -> `filter` desugaring
+ * fallback, and ordinary term selection with a Dynamic fallback.
+ */
+ def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = track("typedSelect") {
+ def typeSelectOnTerm(implicit ctx: Context): Tree = {
+ val qual1 = typedExpr(tree.qualifier, selectionProto(tree.name, pt, this))
+ if (tree.name.isTypeName) checkStable(qual1.tpe, qual1.pos)
+ val select = typedSelect(tree, pt, qual1)
+ // Only fall back to a Dynamic selection when the selection itself asks
+ // for it and the expected type is not an application/assignment proto
+ // (those cases are handled by the application/assignment typing).
+ if (select.tpe ne TryDynamicCallType) select
+ else if (pt.isInstanceOf[PolyProto] || pt.isInstanceOf[FunProto] || pt == AssignProto) select
+ else typedDynamicSelect(tree, Nil, pt)
+ }
+
+ def typeSelectOnType(qual: untpd.Tree)(implicit ctx: Context) =
+ typedSelect(untpd.cpy.Select(tree)(qual, tree.name.toTypeName), pt)
+
+ // Reinterpret the qualifier as a type selection (Java interop; see below).
+ def tryJavaSelectOnType(implicit ctx: Context): Tree = tree.qualifier match {
+ case Select(qual, name) => typeSelectOnType(untpd.Select(qual, name.toTypeName))
+ case Ident(name) => typeSelectOnType(untpd.Ident(name.toTypeName))
+ case _ => errorTree(tree, "cannot convert to type selection") // will never be printed due to fallback
+ }
+
+ def selectWithFallback(fallBack: Context => Tree) =
+ tryAlternatively(typeSelectOnTerm(_))(fallBack)
+
+ if (tree.qualifier.isType) {
+ val qual1 = typedType(tree.qualifier, selectionProto(tree.name, pt, this))
+ assignType(cpy.Select(tree)(qual1, tree.name), qual1)
+ }
+ else if (ctx.compilationUnit.isJava && tree.name.isTypeName)
+ // SI-3120 Java uses the same syntax, A.B, to express selection from the
+ // value A and from the type A. We have to try both.
+ selectWithFallback(tryJavaSelectOnType(_)) // !!! possibly exponential bcs of qualifier retyping
+ else if (tree.name == nme.withFilter && tree.getAttachment(desugar.MaybeFilter).isDefined)
+ selectWithFallback {
+ implicit ctx =>
+ typedSelect(untpd.cpy.Select(tree)(tree.qualifier, nme.filter), pt) // !!! possibly exponential bcs of qualifier retyping
+ }
+ else
+ typeSelectOnTerm(ctx)
+ }
+
+ /** Type a `this` reference; all work is done by `assignType`. */
+ def typedThis(tree: untpd.This)(implicit ctx: Context): Tree = track("typedThis") {
+ assignType(tree)
+ }
+
+ /** Type a `super` reference. For type-name selections the typed qualifier
+ * is returned as-is (super references for types are meaningless); for
+ * term selections the Super node is typed, recording whether it occurs
+ * in a constructor call.
+ */
+ def typedSuper(tree: untpd.Super, pt: Type)(implicit ctx: Context): Tree = track("typedSuper") {
+ val qual1 = typed(tree.qual)
+ val inConstrCall = pt match {
+ case pt: SelectionProto if pt.name == nme.CONSTRUCTOR => true
+ case _ => false
+ }
+ pt match {
+ case pt: SelectionProto if pt.name.isTypeName =>
+ qual1 // don't do super references for types; they are meaningless anyway
+ case _ =>
+ assignType(cpy.Super(tree)(qual1, tree.mix), qual1, inConstrCall)
+ }
+ }
+
+ /** Type a literal constant; all work is done by `assignType`. */
+ def typedLiteral(tree: untpd.Literal)(implicit ctx: Context) = track("typedLiteral") {
+ assignType(tree)
+ }
+
+ /** Type a `new` expression. An anonymous-class instantiation
+ * `new C { ... }` is desugared into a block defining a final anonymous
+ * class followed by `new <anon>`. Otherwise the instantiated type is
+ * typed, access-checked, and checked to be an instantiable class type.
+ */
+ def typedNew(tree: untpd.New, pt: Type)(implicit ctx: Context) = track("typedNew") {
+ tree.tpt match {
+ case templ: untpd.Template =>
+ import untpd._
+ val x = tpnme.ANON_CLASS
+ val clsDef = TypeDef(x, templ).withFlags(Final)
+ typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt)
+ case _ =>
+ var tpt1 = typedType(tree.tpt)
+ tpt1 = tpt1.withType(ensureAccessible(tpt1.tpe, superAccess = false, tpt1.pos))
+ // collapse eta-expanded type constructors back to the constructor itself
+ tpt1.tpe.dealias match {
+ case TypeApplications.EtaExpansion(tycon) => tpt1 = tpt1.withType(tycon)
+ case _ =>
+ }
+ checkClassType(tpt1.tpe, tpt1.pos, traitReq = false, stablePrefixReq = true)
+
+ // wildcard type arguments cannot be instantiated
+ tpt1 match {
+ case AppliedTypeTree(_, targs) =>
+ for (targ @ TypeBoundsTree(_, _) <- targs)
+ ctx.error("type argument must be fully defined", targ.pos)
+ case _ =>
+ }
+
+ assignType(cpy.New(tree)(tpt1), tpt1)
+ // todo in a later phase: checkInstantiatable(cls, tpt1.pos)
+ }
+ }
+
+ /** Type an ascription `expr: T`, covering both expression ascriptions and
+ * typed patterns `x: T` / wildcard-star arguments `xs: _*`. */
+ def typedTyped(tree: untpd.Typed, pt: Type)(implicit ctx: Context): Tree = track("typedTyped") {
+ /* Handles three cases:
+ * @param ifPat how to handle a pattern (_: T)
+ * @param ifExpr how to handle an expression (e: T)
+ * @param wildName what name `w` to use in the rewriting of
+ * (x: T) to (x @ (w: T)). This is either `_` or `_*`.
+ */
+ def cases(ifPat: => Tree, ifExpr: => Tree, wildName: TermName) = tree.expr match {
+ case id: untpd.Ident if (ctx.mode is Mode.Pattern) && isVarPattern(id) =>
+ if (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) ifPat
+ else {
+ import untpd._
+ // rewrite (x: T) to (x @ (w: T)) so the binding gets its own symbol
+ typed(Bind(id.name, Typed(Ident(wildName), tree.tpt)).withPos(id.pos), pt)
+ }
+ case _ => ifExpr
+ }
+ def ascription(tpt: Tree, isWildcard: Boolean) = {
+ val underlyingTreeTpe =
+ if (isRepeatedParamType(tpt)) TypeTree(defn.SeqType.appliedTo(pt :: Nil))
+ else tpt
+
+ val expr1 =
+ if (isRepeatedParamType(tpt)) tree.expr.withType(defn.SeqType.appliedTo(pt :: Nil))
+ else if (isWildcard) tree.expr.withType(tpt.tpe)
+ else typed(tree.expr, tpt.tpe.widenSkolem)
+ assignType(cpy.Typed(tree)(expr1, tpt), underlyingTreeTpe)
+ }
+ if (untpd.isWildcardStarArg(tree))
+ cases(
+ ifPat = ascription(TypeTree(defn.RepeatedParamType.appliedTo(pt)), isWildcard = true),
+ ifExpr = seqToRepeated(typedExpr(tree.expr, defn.SeqType)),
+ wildName = nme.WILDCARD_STAR)
+ else {
+ def typedTpt = checkSimpleKinded(typedType(tree.tpt))
+ def handlePattern: Tree = {
+ val tpt1 = typedTpt
+ // special case for an abstract type that comes with a class tag
+ tpt1.tpe.dealias match {
+ case tref: TypeRef if !tref.symbol.isClass && !ctx.isAfterTyper =>
+ // if a ClassTag[T] is available, rewrite (_: T) to ctag(_)
+ inferImplicit(defn.ClassTagType.appliedTo(tref),
+ EmptyTree, tpt1.pos)(ctx.retractMode(Mode.Pattern)) match {
+ case SearchSuccess(arg, _, _) =>
+ return typed(untpd.Apply(untpd.TypedSplice(arg), tree.expr), pt)
+ case _ =>
+ }
+ case _ =>
+ // constrain GADT bounds by comparing pattern type with expected type
+ if (!ctx.isAfterTyper) tpt1.tpe.<:<(pt)(ctx.addMode(Mode.GADTflexible))
+ }
+ ascription(tpt1, isWildcard = true)
+ }
+ cases(
+ ifPat = handlePattern,
+ ifExpr = ascription(typedTpt, isWildcard = false),
+ wildName = nme.WILDCARD)
+ }
+ }
+
+ /** Type a named argument `name = arg`; the argument is typed against `pt`. */
+ def typedNamedArg(tree: untpd.NamedArg, pt: Type)(implicit ctx: Context) = track("typedNamedArg") {
+   val typedArg = typed(tree.arg, pt)
+   val copied = cpy.NamedArg(tree)(tree.name, typedArg)
+   assignType(copied, typedArg)
+ }
+
+ /** Type an assignment `lhs = rhs`. Applications on the left are rewritten
+ * to `update` calls; otherwise the lhs must be an assignable symbol, a
+ * getter with a matching setter (rewritten to a setter call), or a
+ * Dynamic receiver. Anything else is "reassignment to val".
+ */
+ def typedAssign(tree: untpd.Assign, pt: Type)(implicit ctx: Context) = track("typedAssign") {
+ tree.lhs match {
+ case lhs @ Apply(fn, args) =>
+ // f(args) = rhs ==> f.update(args, rhs)
+ typed(cpy.Apply(lhs)(untpd.Select(fn, nme.update), args :+ tree.rhs), pt)
+ case untpd.TypedSplice(Apply(MaybePoly(Select(fn, app), targs), args)) if app == nme.apply =>
+ // f.apply[Ts](args) = rhs ==> f.update[Ts](args, rhs)
+ val rawUpdate: untpd.Tree = untpd.Select(untpd.TypedSplice(fn), nme.update)
+ val wrappedUpdate =
+ if (targs.isEmpty) rawUpdate
+ else untpd.TypeApply(rawUpdate, targs map (untpd.TypedSplice(_)))
+ val appliedUpdate = cpy.Apply(fn)(wrappedUpdate, (args map (untpd.TypedSplice(_))) :+ tree.rhs)
+ typed(appliedUpdate, pt)
+ case lhs =>
+ val lhsCore = typedUnadapted(lhs, AssignProto)
+ def lhs1 = typed(untpd.TypedSplice(lhsCore))
+ def canAssign(sym: Symbol) = // allow assignments from the primary constructor to class fields
+ sym.is(Mutable, butNot = Accessor) ||
+ ctx.owner.isPrimaryConstructor && !sym.is(Method) && sym.owner == ctx.owner.owner ||
+ ctx.owner.name.isTraitSetterName || ctx.owner.isStaticConstructor
+ lhsCore.tpe match {
+ case ref: TermRef if canAssign(ref.symbol) =>
+ assignType(cpy.Assign(tree)(lhs1, typed(tree.rhs, ref.info)))
+ case _ =>
+ def reassignmentToVal =
+ errorTree(cpy.Assign(tree)(lhsCore, typed(tree.rhs, lhs1.tpe.widen)),
+ "reassignment to val")
+ lhsCore.tpe match {
+ case ref: TermRef => // todo: further conditions to impose on getter?
+ val pre = ref.prefix
+ val setterName = ref.name.setterName
+ val setter = pre.member(setterName)
+ lhsCore match {
+ case lhsCore: RefTree if setter.exists =>
+ // x = rhs ==> x_=(rhs), retyped through typedUnadapted
+ val setterTypeRaw = pre.select(setterName, setter)
+ val setterType = ensureAccessible(setterTypeRaw, isSuperSelection(lhsCore), tree.pos)
+ val lhs2 = healNonvariant(
+ untpd.rename(lhsCore, setterName).withType(setterType), WildcardType)
+ typedUnadapted(cpy.Apply(tree)(untpd.TypedSplice(lhs2), tree.rhs :: Nil))
+ case _ =>
+ reassignmentToVal
+ }
+ case TryDynamicCallType =>
+ typedDynamicAssign(tree, pt)
+ case tpe =>
+ reassignmentToVal
+ }
+ }
+ }
+ }
+
+ /** Index the statements (creating a context with their symbols entered),
+ * then type them; returns the indexed context and the typed statements. */
+ def typedBlockStats(stats: List[untpd.Tree])(implicit ctx: Context): (Context, List[tpd.Tree]) =
+ (index(stats), typedStats(stats, ctx.owner))
+
+ /** Type a block; the result type must not refer to the block's local
+ * symbols (enforced by `ensureNoLocalRefs`). */
+ def typedBlock(tree: untpd.Block, pt: Type)(implicit ctx: Context) = track("typedBlock") {
+ val (exprCtx, stats1) = typedBlockStats(tree.stats)
+ val ept =
+ if (tree.isInstanceOf[untpd.InfixOpBlock])
+ // Right-binding infix operations are expanded to InfixBlocks, which may be followed by arguments.
+ // Example: `(a /: bs)(op)` expands to `{ val x = a; bs./:(x) } (op)` where `{...}` is an InfixBlock.
+ pt
+ else pt.notApplied
+ val expr1 = typedExpr(tree.expr, ept)(exprCtx)
+ ensureNoLocalRefs(
+ assignType(cpy.Block(tree)(stats1, expr1), stats1, expr1), pt, localSyms(stats1))
+ }
+
+ /** The named parts of the block's type that refer to one of the block's
+ * own local symbols, i.e. references that would escape the block. */
+ def escapingRefs(block: Tree, localSyms: => List[Symbol])(implicit ctx: Context): collection.Set[NamedType] = {
+ lazy val locals = localSyms.toSet
+ block.tpe namedPartsWith (tp => locals.contains(tp.symbol))
+ }
+
+ /** Check that expression's type can be expressed without references to locally defined
+ * symbols. The following two remedies are tried before giving up:
+ * 1. If the expected type of the expression is fully defined, pick it as the
+ * type of the result expressed by adding a type ascription.
+ * 2. If (1) fails, force all type variables so that the block's type is
+ * fully defined and try again.
+ */
+ protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol], forcedDefined: Boolean = false)(implicit ctx: Context): Tree = {
+ // push the ascription down to the innermost block expression
+ def ascribeType(tree: Tree, pt: Type): Tree = tree match {
+ case block @ Block(stats, expr) =>
+ val expr1 = ascribeType(expr, pt)
+ cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant
+ case _ =>
+ Typed(tree, TypeTree(pt.simplified))
+ }
+ val leaks = escapingRefs(tree, localSyms)
+ if (leaks.isEmpty) tree
+ else if (isFullyDefined(pt, ForceDegree.none)) ascribeType(tree, pt)
+ else if (!forcedDefined) {
+ // remedy 2: force type variables, avoid the locals, retry once
+ fullyDefinedType(tree.tpe, "block", tree.pos)
+ val tree1 = ascribeType(tree, avoid(tree.tpe, localSyms))
+ ensureNoLocalRefs(tree1, pt, localSyms, forcedDefined = true)
+ } else
+ errorTree(tree,
+ em"local definition of ${leaks.head.name} escapes as part of expression's type ${tree.tpe}"/*; full type: ${result.tpe.toString}"*/)
+ }
+
+ /** Type a conditional. A missing else-branch is replaced by `()`, and the
+ * two branch types are harmonized before the result type is assigned. */
+ def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context): Tree = track("typedIf") {
+   val condTyped = typed(tree.cond, defn.BooleanType)
+   val thenTyped = typed(tree.thenp, pt.notApplied)
+   val elseTyped = typed(tree.elsep orElse (untpd.unitLiteral withPos tree.pos), pt.notApplied)
+   val List(thenHarmonized, elseHarmonized) = harmonize(List(thenTyped, elseTyped))
+   assignType(cpy.If(tree)(condTyped, thenHarmonized, elseHarmonized), thenHarmonized, elseHarmonized)
+ }
+
+ /** Decompose an expected function-like type into (parameter types, result
+ * type). Works for function types, SAM types, and falls back to
+ * `defaultArity` wildcard parameters otherwise. */
+ private def decomposeProtoFunction(pt: Type, defaultArity: Int)(implicit ctx: Context): (List[Type], Type) = pt match {
+ case _ if defn.isFunctionType(pt) =>
+ // if expected parameter type(s) are wildcards, approximate from below.
+ // if expected result type is a wildcard, approximate from above.
+ // this can type the greatest set of admissible closures.
+ (pt.dealias.argTypesLo.init, pt.dealias.argTypesHi.last)
+ case SAMType(meth) =>
+ val mt @ MethodType(_, paramTypes) = meth.info
+ (paramTypes, mt.resultType)
+ case _ =>
+ (List.range(0, defaultArity) map alwaysWildcardType, WildcardType)
+ }
+
+ /** Type a function literal `(params) => body` (or, in type mode, a
+ * function type). Missing parameter types are inferred from the expected
+ * type or from the callee when the body is `f(x1, ..., xN)`; the literal
+ * is then desugared to a closure (or tupled function) and re-typed.
+ */
+ def typedFunction(tree: untpd.Function, pt: Type)(implicit ctx: Context) = track("typedFunction") {
+ val untpd.Function(args, body) = tree
+ if (ctx.mode is Mode.Type)
+ typed(cpy.AppliedTypeTree(tree)(
+ untpd.TypeTree(defn.FunctionClass(args.length).typeRef), args :+ body), pt)
+ else {
+ val params = args.asInstanceOf[List[untpd.ValDef]]
+
+ pt match {
+ case pt: TypeVar if untpd.isFunctionWithUnknownParamType(tree) =>
+ // try to instantiate `pt` if this is possible. If it does not
+ // work the error will be reported later in `inferredParam`,
+ // when we try to infer the parameter type.
+ isFullyDefined(pt, ForceDegree.noBottom)
+ case _ =>
+ }
+
+ val (protoFormals, protoResult) = decomposeProtoFunction(pt, params.length)
+
+ def refersTo(arg: untpd.Tree, param: untpd.ValDef): Boolean = arg match {
+ case Ident(name) => name == param.name
+ case _ => false
+ }
+
+ /** The function body to be returned in the closure. Can become a TypedSplice
+ * of a typed expression if this is necessary to infer a parameter type.
+ */
+ var fnBody = tree.body
+
+ /** If function is of the form
+ * (x1, ..., xN) => f(x1, ..., XN)
+ * the type of `f`, otherwise NoType. (updates `fnBody` as a side effect).
+ */
+ def calleeType: Type = fnBody match {
+ case Apply(expr, args) if (args corresponds params)(refersTo) =>
+ expr match {
+ case untpd.TypedSplice(expr1) =>
+ expr1.tpe
+ case _ =>
+ // type the callee against a wildcard application prototype and
+ // splice the result back into the body to avoid retyping it
+ val protoArgs = args map (_ withType WildcardType)
+ val callProto = FunProto(protoArgs, WildcardType, this)
+ val expr1 = typedExpr(expr, callProto)
+ fnBody = cpy.Apply(fnBody)(untpd.TypedSplice(expr1), args)
+ expr1.tpe
+ }
+ case _ =>
+ NoType
+ }
+
+ /** Two attempts: First, if expected type is fully defined pick this one.
+ * Second, if function is of the form
+ * (x1, ..., xN) => f(x1, ..., XN)
+ * and f has a method type MT, pick the corresponding parameter type in MT,
+ * if this one is fully defined.
+ * If both attempts fail, issue a "missing parameter type" error.
+ */
+ def inferredParamType(param: untpd.ValDef, formal: Type): Type = {
+ if (isFullyDefined(formal, ForceDegree.noBottom)) return formal
+ calleeType.widen match {
+ case mtpe: MethodType =>
+ val pos = params indexWhere (_.name == param.name)
+ if (pos < mtpe.paramTypes.length) {
+ val ptype = mtpe.paramTypes(pos)
+ if (isFullyDefined(ptype, ForceDegree.noBottom)) return ptype
+ }
+ case _ =>
+ }
+ val ofFun =
+ if (nme.syntheticParamNames(args.length + 1) contains param.name)
+ i" of expanded function $tree"
+ else
+ ""
+ errorType(i"missing parameter type for parameter ${param.name}$ofFun, expected = $pt", param.pos)
+ }
+
+ def protoFormal(i: Int): Type =
+ if (protoFormals.length == params.length) protoFormals(i)
+ else errorType(i"wrong number of parameters, expected: ${protoFormals.length}", tree.pos)
+
+ /** Is `formal` a product type which is elementwise compatible with `params`? */
+ def ptIsCorrectProduct(formal: Type) = {
+ val pclass = defn.ProductNType(params.length).symbol
+ isFullyDefined(formal, ForceDegree.noBottom) &&
+ formal.derivesFrom(pclass) &&
+ formal.baseArgTypes(pclass).corresponds(params) {
+ (argType, param) =>
+ param.tpt.isEmpty || argType <:< typedAheadType(param.tpt).tpe
+ }
+ }
+
+ val desugared =
+ if (protoFormals.length == 1 && params.length != 1 && ptIsCorrectProduct(protoFormals.head)) {
+ // ((a, b)) => e with expected type Function1[(A, B), R]: tuple the params
+ desugar.makeTupledFunction(params, fnBody)
+ }
+ else {
+ val inferredParams: List[untpd.ValDef] =
+ for ((param, i) <- params.zipWithIndex) yield
+ if (!param.tpt.isEmpty) param
+ else cpy.ValDef(param)(
+ tpt = untpd.TypeTree(
+ inferredParamType(param, protoFormal(i)).underlyingIfRepeated(isJava = false)))
+
+ // Define result type of closure as the expected type, thereby pushing
+ // down any implicit searches. We do this even if the expected type is not fully
+ // defined, which is a bit of a hack. But it's needed to make the following work
+ // (see typers.scala and printers/PlainPrinter.scala for examples).
+ //
+ // def double(x: Char): String = s"$x$x"
+ // "abc" flatMap double
+ //
+ val resultTpt = protoResult match {
+ case WildcardType(_) => untpd.TypeTree()
+ case _ => untpd.TypeTree(protoResult)
+ }
+ val inlineable = pt.hasAnnotation(defn.InlineParamAnnot)
+ desugar.makeClosure(inferredParams, fnBody, resultTpt, inlineable)
+ }
+ typed(desugared, pt)
+ }
+ }
+
+ /** Type a Closure node. When no target type is given, a SAM expected type
+ * yields a closure targeting that SAM type; dependent method types cannot
+ * be turned into closures and are reported as internal errors. */
+ def typedClosure(tree: untpd.Closure, pt: Type)(implicit ctx: Context): Tree = track("typedClosure") {
+ val env1 = tree.env mapconserve (typed(_))
+ val meth1 = typedUnadapted(tree.meth)
+ val target =
+ if (tree.tpt.isEmpty)
+ meth1.tpe.widen match {
+ case mt: MethodType =>
+ pt match {
+ case SAMType(meth) if !defn.isFunctionType(pt) && mt <:< meth.info =>
+ // underspecified SAM types cannot serve as closure targets
+ if (!isFullyDefined(pt, ForceDegree.all))
+ ctx.error(ex"result type of closure is an underspecified SAM type $pt", tree.pos)
+ TypeTree(pt)
+ case _ =>
+ if (!mt.isDependent) EmptyTree
+ else throw new java.lang.Error(i"internal error: cannot turn dependent method type $mt into closure, position = ${tree.pos}, raw type = ${mt.toString}") // !!! DEBUG. Eventually, convert to an error?
+ }
+ case tp =>
+ throw new java.lang.Error(i"internal error: closing over non-method $tp, pos = ${tree.pos}")
+ }
+ else typed(tree.tpt)
+ //println(i"typing closure $tree : ${meth1.tpe.widen}")
+ assignType(cpy.Closure(tree)(env1, meth1, target), meth1, target)
+ }
+
+ /** Type a match expression. A match without a selector (`{ case ... }`)
+ * is desugared to a case lambda; otherwise the selector is typed, its
+ * type widened/fully defined, and the cases typed and harmonized. */
+ def typedMatch(tree: untpd.Match, pt: Type)(implicit ctx: Context) = track("typedMatch") {
+ tree.selector match {
+ case EmptyTree =>
+ val (protoFormals, _) = decomposeProtoFunction(pt, 1)
+ val unchecked = pt <:< defn.PartialFunctionType
+ typed(desugar.makeCaseLambda(tree.cases, protoFormals.length, unchecked) withPos tree.pos, pt)
+ case _ =>
+ val sel1 = typedExpr(tree.selector)
+ val selType = widenForMatchSelector(
+ fullyDefinedType(sel1.tpe, "pattern selector", tree.pos))
+
+ val cases1 = typedCases(tree.cases, selType, pt.notApplied)
+ val cases2 = harmonize(cases1).asInstanceOf[List[CaseDef]]
+ assignType(cpy.Match(tree)(sel1, cases2), cases2)
+ }
+ }
+
+ /** Type the cases of a match, collecting first the GADT-narrowable
+ * symbols of the selector type and passing them to each case. */
+ def typedCases(cases: List[untpd.CaseDef], selType: Type, pt: Type)(implicit ctx: Context) = {
+
+ /** gadtSyms = "all type parameters of enclosing methods that appear
+ * non-variantly in the selector type" todo: should typevars
+ * which appear with variances +1 and -1 (in different
+ * places) be considered as well?
+ */
+ val gadtSyms: Set[Symbol] = ctx.traceIndented(i"GADT syms of $selType", gadts) {
+ val accu = new TypeAccumulator[Set[Symbol]] {
+ def apply(tsyms: Set[Symbol], t: Type): Set[Symbol] = {
+ val tsyms1 = t match {
+ case tr: TypeRef if (tr.symbol is TypeParam) && tr.symbol.owner.isTerm && variance == 0 =>
+ tsyms + tr.symbol
+ case _ =>
+ tsyms
+ }
+ foldOver(tsyms1, t)
+ }
+ }
+ accu(Set.empty, selType)
+ }
+
+ cases mapconserve (typedCase(_, pt, selType, gadtSyms))
+ }
+
+ /** Type a case. Overridden in ReTyper, that's why it's separate from
+ * typedCases.
+ */
+ def typedCase(tree: untpd.CaseDef, pt: Type, selType: Type, gadtSyms: Set[Symbol])(implicit ctx: Context): CaseDef = track("typedCase") {
+ val originalCtx = ctx
+
+ /** - replace all references to symbols associated with wildcards by their GADT bounds
+ * - enter all symbols introduced by a Bind in current scope
+ */
+ val indexPattern = new TreeMap {
+ val elimWildcardSym = new TypeMap {
+ def apply(t: Type) = t match {
+ case ref @ TypeRef(_, tpnme.WILDCARD) if ctx.gadt.bounds.contains(ref.symbol) =>
+ ctx.gadt.bounds(ref.symbol)
+ case TypeAlias(ref @ TypeRef(_, tpnme.WILDCARD)) if ctx.gadt.bounds.contains(ref.symbol) =>
+ ctx.gadt.bounds(ref.symbol)
+ case _ =>
+ mapOver(t)
+ }
+ }
+ override def transform(trt: Tree)(implicit ctx: Context) =
+ super.transform(trt.withType(elimWildcardSym(trt.tpe))) match {
+ case b: Bind =>
+ // each pattern-bound name may be entered only once per case
+ if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(b.symbol)
+ else ctx.error(new DuplicateBind(b, tree), b.pos)
+ b.symbol.info = elimWildcardSym(b.symbol.info)
+ b
+ case t => t
+ }
+ }
+
+ def caseRest(pat: Tree)(implicit ctx: Context) = {
+ val pat1 = indexPattern.transform(pat)
+ val guard1 = typedExpr(tree.guard, defn.BooleanType)
+ val body1 = ensureNoLocalRefs(typedExpr(tree.body, pt), pt, ctx.scope.toList)
+ .ensureConforms(pt)(originalCtx) // insert a cast if body does not conform to expected type if we disregard gadt bounds
+ assignType(cpy.CaseDef(tree)(pat1, guard1, body1), body1)
+ }
+
+ // give GADT-narrowable symbols fresh bounds for the duration of this case
+ val gadtCtx =
+ if (gadtSyms.isEmpty) ctx
+ else {
+ val c = ctx.fresh.setFreshGADTBounds
+ for (sym <- gadtSyms)
+ if (!c.gadt.bounds.contains(sym))
+ c.gadt.setBounds(sym, TypeBounds.empty)
+ c
+ }
+ val pat1 = typedPattern(tree.pat, selType)(gadtCtx)
+ caseRest(pat1)(gadtCtx.fresh.setNewScope)
+ }
+
+ /** Type a return expression. The enclosing real method determines the
+ * prototype for the returned expression; returns outside a method, from
+ * inline methods, or from methods without a declared result type are
+ * errors.
+ */
+ def typedReturn(tree: untpd.Return)(implicit ctx: Context): Return = track("typedReturn") {
+ def returnProto(owner: Symbol, locals: Scope): Type =
+ if (owner.isConstructor) defn.UnitType
+ else owner.info match {
+ case info: PolyType =>
+ // instantiate the method's type params with the local aliases in scope
+ val tparams = locals.toList.takeWhile(_ is TypeParam)
+ assert(info.paramNames.length == tparams.length,
+ i"return mismatch from $owner, tparams = $tparams, locals = ${locals.toList}%, %")
+ info.instantiate(tparams.map(_.typeRef)).finalResultType
+ case info =>
+ info.finalResultType
+ }
+ // walk outwards to the enclosing real method; yields (from-ref, prototype)
+ def enclMethInfo(cx: Context): (Tree, Type) = {
+ val owner = cx.owner
+ if (cx == NoContext || owner.isType) {
+ ctx.error("return outside method definition", tree.pos)
+ (EmptyTree, WildcardType)
+ }
+ else if (owner != cx.outer.owner && owner.isRealMethod) {
+ if (owner.isInlineMethod)
+ (EmptyTree, errorType(em"no explicit return allowed from inline $owner", tree.pos))
+ else if (!owner.isCompleted)
+ (EmptyTree, errorType(em"$owner has return statement; needs result type", tree.pos))
+ else {
+ val from = Ident(TermRef(NoPrefix, owner.asTerm))
+ val proto = returnProto(owner, cx.scope)
+ (from, proto)
+ }
+ }
+ else enclMethInfo(cx.outer)
+ }
+ val (from, proto) =
+ if (tree.from.isEmpty) enclMethInfo(ctx)
+ else {
+ val from = tree.from.asInstanceOf[tpd.Tree]
+ val proto =
+ if (ctx.erasedTypes) from.symbol.info.finalResultType
+ else WildcardType // We cannot reliably detect the internal type view of polymorphic or dependent methods
+ // because we do not know the internal type params and method params.
+ // Hence no adaptation is possible, and we assume WildcardType as prototype.
+ (from, proto)
+ }
+ val expr1 = typedExpr(tree.expr orElse untpd.unitLiteral.withPos(tree.pos), proto)
+ assignType(cpy.Return(tree)(expr1, from))
+ }
+
+ /** Type a try expression: handlers are matched against Throwable, the
+ * finalizer against Unit, and the body and case results are harmonized. */
+ def typedTry(tree: untpd.Try, pt: Type)(implicit ctx: Context): Try = track("typedTry") {
+ val expr1 = typed(tree.expr, pt.notApplied)
+ val cases1 = typedCases(tree.cases, defn.ThrowableType, pt.notApplied)
+ val finalizer1 = typed(tree.finalizer, defn.UnitType)
+ val expr2 :: cases2x = harmonize(expr1 :: cases1)
+ val cases2 = cases2x.asInstanceOf[List[CaseDef]]
+ assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2)
+ }
+
+ /** Type a throw expression; the thrown value must conform to Throwable. */
+ def typedThrow(tree: untpd.Throw)(implicit ctx: Context): Tree = track("typedThrow") {
+   val thrown = typed(tree.expr, defn.ThrowableType)
+   Throw(thrown).withPos(tree.pos)
+ }
+
+ /** Type a sequence literal. Element prototype comes from the expected
+ * element type; the `elemtpt` falls back to the lub of the element types
+ * (or Object for empty Java varargs). */
+ def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(implicit ctx: Context): SeqLiteral = track("typedSeqLiteral") {
+ val proto1 = pt.elemType match {
+ case NoType => WildcardType
+ case bounds: TypeBounds => WildcardType(bounds)
+ case elemtp => elemtp
+ }
+ val elems1 = tree.elems mapconserve (typed(_, proto1))
+ val proto2 = // the computed type of the `elemtpt` field
+ if (!tree.elemtpt.isEmpty) WildcardType
+ else if (isFullyDefined(proto1, ForceDegree.none)) proto1
+ else if (tree.elems.isEmpty && tree.isInstanceOf[Trees.JavaSeqLiteral[_]])
+ defn.ObjectType // generic empty Java varargs are of type Object[]
+ else ctx.typeComparer.lub(elems1.tpes)
+ val elemtpt1 = typed(tree.elemtpt, proto2)
+ assignType(cpy.SeqLiteral(tree)(elems1, elemtpt1), elems1, elemtpt1)
+ }
+
+ /** Type an Inlined node: bindings are typed as block statements and the
+ * expansion is typed in an inline context derived from the call. */
+ def typedInlined(tree: untpd.Inlined, pt: Type)(implicit ctx: Context): Inlined = {
+ val (exprCtx, bindings1) = typedBlockStats(tree.bindings)
+ val expansion1 = typed(tree.expansion, pt)(inlineContext(tree.call)(exprCtx))
+ assignType(cpy.Inlined(tree)(tree.call, bindings1.asInstanceOf[List[MemberDef]], expansion1),
+ bindings1, expansion1)
+ }
+
+ /** Type a TypeTree. Derived type trees compute their type from an
+ * original symbol attachment; ordinary empty type trees take the
+ * (required fully-defined) expected type. */
+ def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): TypeTree = track("typedTypeTree") {
+ tree match {
+ case tree: untpd.DerivedTypeTree =>
+ tree.ensureCompletions
+ try
+ TypeTree(tree.derivedType(tree.attachment(untpd.OriginalSymbol))) withPos tree.pos
+ // btw, no need to remove the attachment. The typed
+ // tree is different from the untyped one, so the
+ // untyped tree is no longer accessed after all
+ // accesses with typedTypeTree are done.
+ catch {
+ case ex: NoSuchElementException =>
+ // missing attachment is a compiler bug; dump context and rethrow
+ println(s"missing OriginalSymbol for ${ctx.owner.ownersIterator.toList}")
+ throw ex
+ }
+ case _ =>
+ assert(isFullyDefined(pt, ForceDegree.none))
+ tree.withType(pt)
+ }
+ }
+
+ /** Type a singleton type `ref.type`; the reference must be stable. */
+ def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(implicit ctx: Context): SingletonTypeTree = track("typedSingletonTypeTree") {
+   val refTyped = typedExpr(tree.ref)
+   checkStable(refTyped.tpe, tree.pos)
+   assignType(cpy.SingletonTypeTree(tree)(refTyped), refTyped)
+ }
+
+ /** Type an intersection type `left & right`. */
+ def typedAndTypeTree(tree: untpd.AndTypeTree)(implicit ctx: Context): AndTypeTree = track("typedAndTypeTree") {
+   val lhs = typed(tree.left)
+   val rhs = typed(tree.right)
+   val copied = cpy.AndTypeTree(tree)(lhs, rhs)
+   assignType(copied, lhs, rhs)
+ }
+
+ /** Type a union type `left | right`; singleton operands are rejected. */
+ def typedOrTypeTree(tree: untpd.OrTypeTree)(implicit ctx: Context): OrTypeTree = track("typedOrTypeTree") {
+   val where = "in a union type"
+   val lhs = checkNotSingleton(typed(tree.left), where)
+   val rhs = checkNotSingleton(typed(tree.right), where)
+   val copied = cpy.OrTypeTree(tree)(lhs, rhs)
+   assignType(copied, lhs, rhs)
+ }
+
+ /** Type a refined type `T { refinements }` by desugaring it into a class
+ * with the refinements as body, typing that class, and checking each
+ * refinement for cycles and for a matching parent member. */
+ def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(implicit ctx: Context): RefinedTypeTree = track("typedRefinedTypeTree") {
+ val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt)
+ val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements)
+ val refineCls = createSymbol(refineClsDef).asClass
+ val TypeDef(_, impl: Template) = typed(refineClsDef)
+ val refinements1 = impl.body
+ assert(tree.refinements.length == refinements1.length, s"${tree.refinements} != $refinements1")
+ val seen = mutable.Set[Symbol]()
+ for (refinement <- refinements1) { // TODO: get clarity whether we want to enforce these conditions
+ typr.println(s"adding refinement $refinement")
+ checkRefinementNonCyclic(refinement, refineCls, seen)
+ val rsym = refinement.symbol
+ // a refined method must refine something that exists in the parent
+ if (rsym.is(Method) && rsym.allOverriddenSymbols.isEmpty)
+ ctx.error(i"refinement $rsym without matching type in parent $tpt1", refinement.pos)
+ }
+ assignType(cpy.RefinedTypeTree(tree)(tpt1, refinements1), tpt1, refinements1, refineCls)
+ }
+
+ /** Type an applied type `T[args]`. Checks that `T` takes type parameters
+ * and that the argument count matches (excess arguments are dropped
+ * after reporting an error); bound conformance is checked in PostTyper.
+ */
+ def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") {
+ val tpt1 = typed(tree.tpt, AnyTypeConstructorProto)(ctx.retractMode(Mode.Pattern))
+ val tparams = tpt1.tpe.typeParams
+ if (tparams.isEmpty) {
+ ctx.error(ex"${tpt1.tpe} does not take type parameters", tree.pos)
+ tpt1
+ }
+ else {
+ var args = tree.args
+ val args1 =
+ if (hasNamedArg(args)) typedNamedArgs(args)
+ else {
+ if (args.length != tparams.length) {
+ wrongNumberOfArgs(tpt1.tpe, "type", tparams, args, tree.pos)
+ args = args.take(tparams.length)
+ }
+ // in patterns, variable type args become pattern-bound symbols
+ // typed against the corresponding parameter bounds
+ def typedArg(arg: untpd.Tree, tparam: TypeParamInfo) = {
+ val (desugaredArg, argPt) =
+ if (ctx.mode is Mode.Pattern)
+ (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.paramBounds)
+ else
+ (arg, WildcardType)
+ typed(desugaredArg, argPt)
+ }
+ args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]]
+ }
+ // check that arguments conform to bounds is done in phase PostTyper
+ assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1)
+ }
+ }
+
+ /** Type a type lambda `[tparams] -> body`: index the parameters so they
+ * are in scope, type them, then type the body. */
+ def typedPolyTypeTree(tree: untpd.PolyTypeTree)(implicit ctx: Context): Tree = track("typedPolyTypeTree") {
+   val PolyTypeTree(tparams, body) = tree
+   index(tparams)
+   val tparamsTyped = tparams.mapconserve(typed(_).asInstanceOf[TypeDef])
+   val bodyTyped = typedType(body)
+   assignType(cpy.PolyTypeTree(tree)(tparamsTyped, bodyTyped), tparamsTyped, bodyTyped)
+ }
+
+ /** Type a by-name type `=> T`. */
+ def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") {
+   val resTyped = typed(tree.result)
+   assignType(cpy.ByNameTypeTree(tree)(resTyped), resTyped)
+ }
+
+ /** Define a new symbol associated with a Bind or pattern wildcard and
+ * make it gadt narrowable.
+ */
+ private def newPatternBoundSym(name: Name, info: Type, pos: Position)(implicit ctx: Context) = {
+ val flags = if (name.isTypeName) BindDefinedType else EmptyFlags
+ val sym = ctx.newSymbol(ctx.owner, name, flags | Case, info, coord = pos)
+ // type bindings participate in GADT bound inference
+ if (name.isTypeName) ctx.gadt.setBounds(sym, info.bounds)
+ sym
+ }
+
+ /** Type a type-bounds tree `>: lo <: hi` (after desugaring defaults). */
+ def typedTypeBoundsTree(tree: untpd.TypeBoundsTree)(implicit ctx: Context): TypeBoundsTree = track("typedTypeBoundsTree") {
+ val TypeBoundsTree(lo, hi) = desugar.typeBoundsTree(tree)
+ val lo1 = typed(lo)
+ val hi1 = typed(hi)
+ val tree1 = assignType(cpy.TypeBoundsTree(tree)(lo1, hi1), lo1, hi1)
+ if (ctx.mode.is(Mode.Pattern)) {
+ // Associate a pattern-bound type symbol with the wildcard.
+ // The bounds of the type symbol can be constrained when comparing a pattern type
+ // with an expected type in typedTyped. The type symbol is eliminated once
+ // the enclosing pattern has been typechecked; see `indexPattern` in `typedCase`.
+ val wildcardSym = newPatternBoundSym(tpnme.WILDCARD, tree1.tpe, tree.pos)
+ tree1.withType(wildcardSym.typeRef)
+ }
+ else tree1
+ }
+
+ /** Type a Bind pattern `x @ body`, introducing a pattern-bound symbol.
+ * A typed pattern that was rewritten to a ClassTag extractor is further
+ * rewritten so the binder sits inside the extractor. */
+ def typedBind(tree: untpd.Bind, pt: Type)(implicit ctx: Context): Tree = track("typedBind") {
+ val pt1 = fullyDefinedType(pt, "pattern variable", tree.pos)
+ val body1 = typed(tree.body, pt1)
+ typr.println(i"typed bind $tree pt = $pt1 bodytpe = ${body1.tpe}")
+ body1 match {
+ case UnApply(fn, Nil, arg :: Nil) if tree.body.isInstanceOf[untpd.Typed] =>
+ // A typed pattern `x @ (_: T)` with an implicit `ctag: ClassTag[T]`
+ // was rewritten to `x @ ctag(_)`.
+ // Rewrite further to `ctag(x @ _)`
+ assert(fn.symbol.owner == defn.ClassTagClass)
+ tpd.cpy.UnApply(body1)(fn, Nil,
+ typed(untpd.Bind(tree.name, arg).withPos(tree.pos), arg.tpe) :: Nil)
+ case _ =>
+ val sym = newPatternBoundSym(tree.name, body1.tpe, tree.pos)
+ assignType(cpy.Bind(tree)(tree.name, body1), sym)
+ }
+ }
+
+ /** Type a pattern alternative `p1 | p2 | ...`; each branch is typed
+ * against the same expected type. */
+ def typedAlternative(tree: untpd.Alternative, pt: Type)(implicit ctx: Context): Alternative = track("typedAlternative") {
+   val branches = tree.trees.mapconserve(typed(_, pt))
+   assignType(cpy.Alternative(tree)(branches), branches)
+ }
+
+ /** Force the symbol's annotation trees and type the untyped annotations
+ * of `mdef` in the context enclosing the definition, so typed-ahead
+ * annotations are marked as definitely typed. */
+ def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(implicit ctx: Context): Unit = {
+ // necessary to force annotation trees to be computed.
+ sym.annotations.foreach(_.ensureCompleted)
+ val annotCtx = ctx.outersIterator.dropWhile(_.owner == sym).next
+ // necessary in order to mark the typed ahead annotations as definitely typed:
+ untpd.modsDeco(mdef).mods.annotations.foreach(typedAnnotation(_)(annotCtx))
+ }
+
+ /** Type an annotation tree against the Annotation class type. */
+ def typedAnnotation(annot: untpd.Tree)(implicit ctx: Context): Tree = track("typedAnnotation") {
+ typed(annot, defn.AnnotationType)
+ }
+
+ /** Type a val definition for an already-created symbol `sym`. The rhs is
+ * typed against the declared type; a wildcard rhs (`= _`) just takes the
+ * declared type. Inline vals must have a constant rhs. */
+ def typedValDef(vdef: untpd.ValDef, sym: Symbol)(implicit ctx: Context) = track("typedValDef") {
+ val ValDef(name, tpt, _) = vdef
+ completeAnnotations(vdef, sym)
+ val tpt1 = checkSimpleKinded(typedType(tpt))
+ val rhs1 = vdef.rhs match {
+ case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe
+ case rhs => typedExpr(rhs, tpt1.tpe)
+ }
+ val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym)
+ if (sym.is(Inline, butNot = DeferredOrParamAccessor))
+ checkInlineConformant(rhs1, em"right-hand side of inline $sym")
+ patchIfLazy(vdef1)
+ vdef1
+ }
+
+ /** Add a `@volatile` to lazy vals when rewriting from Scala2.
+ * Only fires under -language:Scala2 with -rewrite enabled, for
+ * non-deferred, non-module, non-synthetic lazy vals that are not
+ * already volatile; patches the source at the definition's start. */
+ private def patchIfLazy(vdef: ValDef)(implicit ctx: Context): Unit = {
+ val sym = vdef.symbol
+ if (sym.is(Lazy, butNot = Deferred | Module | Synthetic) && !sym.isVolatile &&
+ ctx.scala2Mode && ctx.settings.rewrite.value.isDefined &&
+ !ctx.isAfterTyper)
+ patch(Position(toUntyped(vdef).pos.start), "@volatile ")
+ }
+
+ /** Type a method definition for symbol `sym`: annotations, type parameters,
+ * value parameters, result type, and finally the right-hand side (possibly
+ * in a context carrying GADT bounds, see below).
+ */
+ def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = track("typedDefDef") {
+ val DefDef(name, tparams, vparamss, tpt, _) = ddef
+ completeAnnotations(ddef, sym)
+ val tparams1 = tparams mapconserve (typed(_).asInstanceOf[TypeDef])
+ val vparamss1 = vparamss nestedMapconserve (typed(_).asInstanceOf[ValDef])
+ if (sym is Implicit) checkImplicitParamsNotSingletons(vparamss1)
+ var tpt1 = checkSimpleKinded(typedType(tpt))
+
+ var rhsCtx = ctx
+ if (sym.isConstructor && !sym.isPrimaryConstructor && tparams1.nonEmpty) {
+ // for secondary constructors we need a context that "knows"
+ // that their type parameters are aliases of the class type parameters.
+ // See pos/i941.scala
+ rhsCtx = ctx.fresh.setFreshGADTBounds
+ (tparams1, sym.owner.typeParams).zipped.foreach ((tdef, tparam) =>
+ rhsCtx.gadt.setBounds(tdef.symbol, TypeAlias(tparam.typeRef)))
+ }
+ val rhs1 = typedExpr(ddef.rhs, tpt1.tpe)(rhsCtx)
+
+ // Overwrite inline body to make sure it is not evaluated twice
+ if (sym.isInlineMethod) Inliner.registerInlineInfo(sym, _ => rhs1)
+
+ if (sym.isAnonymousFunction) {
+ // If we define an anonymous function, make sure the return type does not
+ // refer to parameters. This is necessary because closure types are
+ // function types so no dependencies on parameters are allowed.
+ tpt1 = tpt1.withType(avoid(tpt1.tpe, vparamss1.flatMap(_.map(_.symbol))))
+ }
+
+ assignType(cpy.DefDef(ddef)(name, tparams1, vparamss1, tpt1, rhs1), sym)
+ //todo: make sure dependent method types do not depend on implicits or by-name params
+ }
+
+ /** Type a (non-class) type definition for symbol `sym`. A `PolyTypeTree`
+ * right-hand side first types its type parameters, then the body.
+ */
+ def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(implicit ctx: Context): Tree = track("typedTypeDef") {
+ val TypeDef(name, rhs) = tdef
+ completeAnnotations(tdef, sym)
+ val rhs1 = tdef.rhs match {
+ case rhs @ PolyTypeTree(tparams, body) =>
+ val tparams1 = tparams.map(typed(_)).asInstanceOf[List[TypeDef]]
+ val body1 = typedType(body)
+ assignType(cpy.PolyTypeTree(rhs)(tparams1, body1), tparams1, body1)
+ case rhs =>
+ typedType(rhs)
+ }
+ assignType(cpy.TypeDef(tdef)(name, rhs1), sym)
+ }
+
+ /** Type a class or trait definition for class symbol `cls`: constructor,
+ * parents (ensuring the first parent is a class and carries a constructor
+ * call where required), self type, and body statements.
+ */
+ def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(implicit ctx: Context) = track("typedClassDef") {
+ val TypeDef(name, impl @ Template(constr, parents, self, _)) = cdef
+ val superCtx = ctx.superCallContext
+
+ /** If `ref` is an implicitly parameterized trait, pass an implicit argument list.
+ * Otherwise, if `ref` is a parameterized trait, error.
+ * Note: Traits and classes currently always have at least an empty parameter list ()
+ * before the implicit parameters (this is inserted if not given in source).
+ * We skip this parameter list when deciding whether a trait is parameterless or not.
+ * @param ref The tree referring to the (parent) trait
+ * @param psym Its type symbol
+ * @param cinfo The info of its constructor
+ */
+ def maybeCall(ref: Tree, psym: Symbol, cinfo: Type): Tree = cinfo match {
+ case cinfo: PolyType =>
+ maybeCall(ref, psym, cinfo.resultType)
+ case cinfo @ MethodType(Nil, _) if cinfo.resultType.isInstanceOf[ImplicitMethodType] =>
+ val icall = New(ref).select(nme.CONSTRUCTOR).appliedToNone
+ typedExpr(untpd.TypedSplice(icall))(superCtx)
+ case cinfo @ MethodType(Nil, _) if !cinfo.resultType.isInstanceOf[MethodType] =>
+ ref
+ case cinfo: MethodType =>
+ if (!ctx.erasedTypes) { // after erasure, constructor arguments are passed in the super call.
+ typr.println(i"constr type: $cinfo")
+ ctx.error(em"parameterized $psym lacks argument list", ref.pos)
+ }
+ ref
+ case _ =>
+ ref
+ }
+
+ /** Type a parent tree: as a type if it is one (possibly adding an implicit
+ * argument list via `maybeCall`), otherwise as a constructor call expression.
+ */
+ def typedParent(tree: untpd.Tree): Tree =
+ if (tree.isType) {
+ val result = typedType(tree)(superCtx)
+ val psym = result.tpe.typeSymbol
+ if (psym.is(Trait) && !cls.is(Trait) && !cls.superClass.isSubClass(psym))
+ maybeCall(result, psym, psym.primaryConstructor.info)
+ else
+ result
+ }
+ else {
+ val result = typedExpr(tree)(superCtx)
+ checkParentCall(result, cls)
+ result
+ }
+
+ completeAnnotations(cdef, cls)
+ val constr1 = typed(constr).asInstanceOf[DefDef]
+ val parentsWithClass = ensureFirstIsClass(parents mapconserve typedParent, cdef.pos.toSynthetic)
+ val parents1 = ensureConstrCall(cls, parentsWithClass)(superCtx)
+ val self1 = typed(self)(ctx.outer).asInstanceOf[ValDef] // outer context where class members are not visible
+ val dummy = localDummy(cls, impl)
+ val body1 = typedStats(impl.body, dummy)(inClassContext(self1.symbol))
+
+ // Expand comments and type usecases
+ cookComments(body1.map(_.symbol), self1.symbol)(localContext(cdef, cls).setNewScope)
+
+ checkNoDoubleDefs(cls)
+ val impl1 = cpy.Template(impl)(constr1, parents1, self1, body1)
+ .withType(dummy.nonMemberTermRef)
+ checkVariance(impl1)
+ if (!cls.is(AbstractOrTrait) && !ctx.isAfterTyper) checkRealizableBounds(cls.typeRef, cdef.namePos)
+ val cdef1 = assignType(cpy.TypeDef(cdef)(name, impl1), cls)
+ if (ctx.phase.isTyper && cdef1.tpe.derivesFrom(defn.DynamicClass) && !ctx.dynamicsEnabled) {
+ val isRequired = parents1.exists(_.tpe.isRef(defn.DynamicClass))
+ ctx.featureWarning(nme.dynamics.toString, "extension of type scala.Dynamic", isScala2Feature = true,
+ cls, isRequired, cdef.pos)
+ }
+ cdef1
+
+ // todo later: check that
+ // 1. If class is non-abstract, it is instantiatable:
+ // - self type is a supertype of own type
+ // - all type members have consistent bounds
+ // 2. all private type members have consistent bounds
+ // 3. Types do not override classes.
+ // 4. Polymorphic type defs override nothing.
+ }
+
+ /** Ensure that the first type in a list of parent types Ps points to a non-trait class.
+ * If that's not already the case, add one. The added class type CT is determined as follows.
+ * First, let C be the unique class such that
+ * - there is a parent P_i such that P_i derives from C, and
+ * - for every class D: If some parent P_j, j <= i derives from D, then C derives from D.
+ * Then, let CT be the smallest type which
+ * - has C as its class symbol, and
+ * - for all parents P_i: If P_i derives from C then P_i <:< CT.
+ */
+ def ensureFirstIsClass(parents: List[Type])(implicit ctx: Context): List[Type] = {
+ // The nearest non-trait class reachable from `cls` via first parents; Object as fallback.
+ def realClassParent(cls: Symbol): ClassSymbol =
+ if (!cls.isClass) defn.ObjectClass
+ else if (!(cls is Trait)) cls.asClass
+ else cls.asClass.classParents match {
+ case parentRef :: _ => realClassParent(parentRef.symbol)
+ case nil => defn.ObjectClass
+ }
+ // Keep the more derived of `candidate` and `parent`'s real class.
+ def improve(candidate: ClassSymbol, parent: Type): ClassSymbol = {
+ val pcls = realClassParent(parent.classSymbol)
+ if (pcls derivesFrom candidate) pcls else candidate
+ }
+ parents match {
+ case p :: _ if p.classSymbol.isRealClass => parents
+ case _ =>
+ val pcls = (defn.ObjectClass /: parents)(improve)
+ typr.println(i"ensure first is class $parents%, % --> ${parents map (_ baseTypeWithArgs pcls)}%, %")
+ val ptype = ctx.typeComparer.glb(
+ defn.ObjectType :: (parents map (_ baseTypeWithArgs pcls)))
+ ptype :: parents
+ }
+ }
+
+ /** Ensure that first parent tree refers to a real class.
+ * If not, prepend a synthetic TypeTree for the inferred class type
+ * (computed by the type-level `ensureFirstIsClass` overload above).
+ */
+ def ensureFirstIsClass(parents: List[Tree], pos: Position)(implicit ctx: Context): List[Tree] = parents match {
+ case p :: ps if p.tpe.classSymbol.isRealClass => parents
+ case _ =>
+ // add synthetic class type
+ val first :: _ = ensureFirstIsClass(parents.tpes)
+ TypeTree(checkFeasible(first, pos, em"\n in inferred parent $first")).withPos(pos) :: parents
+ }
+
+ /** If this is a real class, make sure its first parent is a
+ * constructor call. Cannot simply use a type. Overridden in ReTyper.
+ */
+ def ensureConstrCall(cls: ClassSymbol, parents: List[Tree])(implicit ctx: Context): List[Tree] = {
+ val firstParent :: otherParents = parents
+ // only non-trait, non-Java classes need the synthesized `new` call
+ if (firstParent.isType && !(cls is Trait) && !cls.is(JavaDefined))
+ typed(untpd.New(untpd.TypedSplice(firstParent), Nil)) :: otherParents
+ else parents
+ }
+
+ /** Check variance correctness of top-level definitions in `tree`. Overridden in retyper */
+ def checkVariance(tree: Tree)(implicit ctx: Context) = VarianceChecker.check(tree)
+
+ /** A fresh local dummy symbol owning the statements of template `impl` of class `cls`. */
+ def localDummy(cls: ClassSymbol, impl: untpd.Template)(implicit ctx: Context): Symbol =
+ ctx.newLocalDummy(cls, impl.pos)
+
+ /** Type an import clause; the import prefix must be a stable (and realizable) path. */
+ def typedImport(imp: untpd.Import, sym: Symbol)(implicit ctx: Context): Import = track("typedImport") {
+ val expr1 = typedExpr(imp.expr, AnySelectionProto)
+ checkStable(expr1.tpe, imp.expr.pos)
+ if (!ctx.isAfterTyper) checkRealizable(expr1.tpe, imp.expr.pos)
+ assignType(cpy.Import(imp)(expr1, imp.selectors), sym)
+ }
+
+ /** Type a package clause: resolve the package id, then type the statements
+ * in a context owned by the package's module class.
+ */
+ def typedPackageDef(tree: untpd.PackageDef)(implicit ctx: Context): Tree = track("typedPackageDef") {
+ val pid1 = typedExpr(tree.pid, AnySelectionProto)(ctx.addMode(Mode.InPackageClauseName))
+ val pkg = pid1.symbol
+
+ // Package will not exist if a duplicate type has already been entered, see
+ // `tests/neg/1708.scala`, else branch's error message should be suppressed
+ if (pkg.exists) {
+ val packageContext =
+ if (pkg is Package) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree)
+ else {
+ ctx.error(em"$pkg is already defined, cannot be a package", tree.pos)
+ ctx
+ }
+ val stats1 = typedStats(tree.stats, pkg.moduleClass)(packageContext)
+ cpy.PackageDef(tree)(pid1.asInstanceOf[RefTree], stats1) withType pkg.valRef
+ } else errorTree(tree, i"package ${tree.pid.name} does not exist")
+ }
+
+ /** Type an annotated tree. In type mode the result is an Annotated node;
+ * in term mode it becomes a Typed node whose type carries the annotation.
+ */
+ def typedAnnotated(tree: untpd.Annotated, pt: Type)(implicit ctx: Context): Tree = track("typedAnnotated") {
+ val annot1 = typedExpr(tree.annot, defn.AnnotationType)
+ val arg1 = typed(tree.arg, pt)
+ if (ctx.mode is Mode.Type)
+ assignType(cpy.Annotated(tree)(arg1, annot1), arg1, annot1)
+ else {
+ val tpt = TypeTree(AnnotatedType(arg1.tpe.widen, Annotation(annot1)))
+ assignType(cpy.Typed(tree)(arg1, tpt), tpt)
+ }
+ }
+
+ /** Unwrap an already-typed splice, re-assigning ownership of any contained
+ * bindings if the splice is used under a different owner.
+ */
+ def typedTypedSplice(tree: untpd.TypedSplice)(implicit ctx: Context): Tree =
+ tree.tree match {
+ case tree1: TypeTree => tree1 // no change owner necessary here ...
+ case tree1: Ident => tree1 // ... or here, since these trees cannot contain bindings
+ case tree1 =>
+ if (ctx.owner ne tree.owner) tree1.changeOwner(tree.owner, ctx.owner)
+ else tree1
+ }
+
+
+ /** Type a postfix `qual _` as a function value. If the result is not a
+ * function, this is an error — except under Scala2 mode, where `x _` is
+ * migrated (and under -rewrite, patched) to `(() => x)`.
+ */
+ def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(implicit ctx: Context): Tree = {
+ val untpd.PostfixOp(qual, nme.WILDCARD) = tree
+ val pt1 = if (defn.isFunctionType(pt)) pt else AnyFunctionProto
+ var res = typed(qual, pt1)
+ if (pt1.eq(AnyFunctionProto) && !defn.isFunctionClass(res.tpe.classSymbol)) {
+ def msg = i"not a function: ${res.tpe}; cannot be followed by `_'"
+ if (ctx.scala2Mode) {
+ // Under -rewrite, patch `x _` to `(() => x)`
+ ctx.migrationWarning(msg, tree.pos)
+ patch(Position(tree.pos.start), "(() => ")
+ patch(Position(qual.pos.end, tree.pos.end), ")")
+ res = typed(untpd.Function(Nil, untpd.TypedSplice(res)))
+ }
+ else ctx.error(msg, tree.pos)
+ }
+ res
+ }
+
+ /** Retrieve symbol attached to given tree, completing it first; NoSymbol if none is attached. */
+ protected def retrieveSym(tree: untpd.Tree)(implicit ctx: Context) = tree.removeAttachment(SymOfTree) match {
+ case Some(sym) =>
+ sym.ensureCompleted()
+ sym
+ case none =>
+ NoSymbol
+ }
+
+ /** A fresh local context with given tree and owner.
+ * Owner might not exist (can happen for self valdefs), in which case
+ * no owner is set in result context
+ */
+ protected def localContext(tree: untpd.Tree, owner: Symbol)(implicit ctx: Context): FreshContext = {
+ val freshCtx = ctx.fresh.setTree(tree)
+ if (owner.exists) freshCtx.setOwner(owner) else freshCtx
+ }
+
+ protected def localTyper(sym: Symbol): Typer = nestedTyper.remove(sym).get
+
+ /** Main typing dispatch (before adaptation): expand the tree, reuse a
+ * typed-ahead result if one is attached, otherwise dispatch on the shape
+ * of the (possibly desugared) tree to the specific `typedXYZ` method.
+ */
+ def typedUnadapted(initTree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = {
+ record("typedUnadapted")
+ val xtree = expanded(initTree)
+ xtree.removeAttachment(TypedAhead) match {
+ case Some(ttree) => ttree
+ case none =>
+
+ // Trees that carry a name: definitions and references, routed to their
+ // typing method together with any symbol created ahead of time.
+ def typedNamed(tree: untpd.NameTree, pt: Type)(implicit ctx: Context): Tree = {
+ val sym = retrieveSym(xtree)
+ tree match {
+ case tree: untpd.Ident => typedIdent(tree, pt)
+ case tree: untpd.Select => typedSelect(tree, pt)
+ case tree: untpd.Bind => typedBind(tree, pt)
+ case tree: untpd.ValDef =>
+ if (tree.isEmpty) tpd.EmptyValDef
+ else typedValDef(tree, sym)(localContext(tree, sym).setNewScope)
+ case tree: untpd.DefDef =>
+ val typer1 = localTyper(sym)
+ typer1.typedDefDef(tree, sym)(localContext(tree, sym).setTyper(typer1))
+ case tree: untpd.TypeDef =>
+ if (tree.isClassDef)
+ typedClassDef(tree, sym.asClass)(localContext(tree, sym).setMode(ctx.mode &~ Mode.InSuperCall))
+ else
+ typedTypeDef(tree, sym)(localContext(tree, sym).setNewScope)
+ case _ => typedUnadapted(desugar(tree), pt)
+ }
+ }
+
+ // All remaining tree shapes; unmatched trees are desugared and retried.
+ def typedUnnamed(tree: untpd.Tree): Tree = tree match {
+ case tree: untpd.Apply =>
+ if (ctx.mode is Mode.Pattern) typedUnApply(tree, pt) else typedApply(tree, pt)
+ case tree: untpd.This => typedThis(tree)
+ case tree: untpd.Literal => typedLiteral(tree)
+ case tree: untpd.New => typedNew(tree, pt)
+ case tree: untpd.Typed => typedTyped(tree, pt)
+ case tree: untpd.NamedArg => typedNamedArg(tree, pt)
+ case tree: untpd.Assign => typedAssign(tree, pt)
+ case tree: untpd.Block => typedBlock(desugar.block(tree), pt)(ctx.fresh.setNewScope)
+ case tree: untpd.If => typedIf(tree, pt)
+ case tree: untpd.Function => typedFunction(tree, pt)
+ case tree: untpd.Closure => typedClosure(tree, pt)
+ case tree: untpd.Match => typedMatch(tree, pt)
+ case tree: untpd.Return => typedReturn(tree)
+ case tree: untpd.Try => typedTry(tree, pt)
+ case tree: untpd.Throw => typedThrow(tree)
+ case tree: untpd.TypeApply => typedTypeApply(tree, pt)
+ case tree: untpd.Super => typedSuper(tree, pt)
+ case tree: untpd.SeqLiteral => typedSeqLiteral(tree, pt)
+ case tree: untpd.Inlined => typedInlined(tree, pt)
+ case tree: untpd.TypeTree => typedTypeTree(tree, pt)
+ case tree: untpd.SingletonTypeTree => typedSingletonTypeTree(tree)
+ case tree: untpd.AndTypeTree => typedAndTypeTree(tree)
+ case tree: untpd.OrTypeTree => typedOrTypeTree(tree)
+ case tree: untpd.RefinedTypeTree => typedRefinedTypeTree(tree)
+ case tree: untpd.AppliedTypeTree => typedAppliedTypeTree(tree)
+ case tree: untpd.PolyTypeTree => typedPolyTypeTree(tree)(localContext(tree, NoSymbol).setNewScope)
+ case tree: untpd.ByNameTypeTree => typedByNameTypeTree(tree)
+ case tree: untpd.TypeBoundsTree => typedTypeBoundsTree(tree)
+ case tree: untpd.Alternative => typedAlternative(tree, pt)
+ case tree: untpd.PackageDef => typedPackageDef(tree)
+ case tree: untpd.Annotated => typedAnnotated(tree, pt)
+ case tree: untpd.TypedSplice => typedTypedSplice(tree)
+ case tree: untpd.UnApply => typedUnApply(tree, pt)
+ case tree @ untpd.PostfixOp(qual, nme.WILDCARD) => typedAsFunction(tree, pt)
+ case untpd.EmptyTree => tpd.EmptyTree
+ case _ => typedUnadapted(desugar(tree), pt)
+ }
+
+ xtree match {
+ case xtree: untpd.NameTree => typedNamed(encodeName(xtree), pt)
+ case xtree: untpd.Import => typedImport(xtree, retrieveSym(xtree))
+ case xtree => typedUnnamed(xtree)
+ }
+ }
+ }
+
+ /** The tree with its name replaced by the encoded (operator-mangled) form of the name. */
+ protected def encodeName(tree: untpd.NameTree)(implicit ctx: Context): untpd.NameTree =
+ untpd.rename(tree, tree.name.encode)
+
+ /** Type `tree` against expected type `pt` and adapt the result.
+ * Cyclic references and type errors are converted to error trees.
+ */
+ def typed(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = /*>|>*/ ctx.traceIndented (i"typing $tree", typr, show = true) /*<|<*/ {
+ assertPositioned(tree)
+ try adapt(typedUnadapted(tree, pt), pt, tree)
+ catch {
+ case ex: CyclicReference => errorTree(tree, cyclicErrorMsg(ex))
+ case ex: TypeError => errorTree(tree, ex.getMessage)
+ }
+ }
+
+ /** Type each tree in `trees`, preserving the list where nothing changes. */
+ def typedTrees(trees: List[untpd.Tree])(implicit ctx: Context): List[Tree] =
+ trees mapconserve (typed(_))
+
+ /** Type a statement sequence. Imports extend the context for the following
+ * statements; definitions expand any attached expansion first; inline
+ * methods are rewritten with accessors (see `inlineExpansion`); plain
+ * expressions are typed with `exprOwner` as owner.
+ */
+ def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(implicit ctx: Context): List[tpd.Tree] = {
+ val buf = new mutable.ListBuffer[Tree]
+ @tailrec def traverse(stats: List[untpd.Tree])(implicit ctx: Context): List[Tree] = stats match {
+ case (imp: untpd.Import) :: rest =>
+ val imp1 = typed(imp)
+ buf += imp1
+ traverse(rest)(importContext(imp1.symbol, imp.selectors))
+ case (mdef: untpd.DefTree) :: rest =>
+ mdef.removeAttachment(ExpandedTree) match {
+ case Some(xtree) =>
+ traverse(xtree :: rest)
+ case none =>
+ typed(mdef) match {
+ case mdef1: DefDef if Inliner.hasBodyToInline(mdef1.symbol) =>
+ buf ++= inlineExpansion(mdef1)
+ case mdef1 =>
+ buf += mdef1
+ }
+ traverse(rest)
+ }
+ case Thicket(stats) :: rest =>
+ traverse(stats ++ rest)
+ case stat :: rest =>
+ buf += typed(stat)(ctx.exprContext(stat, exprOwner))
+ traverse(rest)
+ case nil =>
+ buf.toList
+ }
+ traverse(stats)
+ }
+
+ /** Given an inline method `mdef`, the method rewritten so that its body
+ * uses accessors to access non-public members, followed by the accessor definitions.
+ * Overridden in Retyper to return `mdef` unchanged.
+ */
+ protected def inlineExpansion(mdef: DefDef)(implicit ctx: Context): List[Tree] =
+ tpd.cpy.DefDef(mdef)(rhs = Inliner.bodyToInline(mdef.symbol)) ::
+ Inliner.removeInlineAccessors(mdef.symbol)
+
+ /** Convenience entry points: type `tree` in expression, type, or pattern mode respectively. */
+ def typedExpr(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree =
+ typed(tree, pt)(ctx retractMode Mode.PatternOrType)
+ def typedType(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = // todo: retract mode between Type and Pattern?
+ typed(tree, pt)(ctx addMode Mode.Type)
+ def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(implicit ctx: Context): Tree =
+ typed(tree, selType)(ctx addMode Mode.Pattern)
+
+ /** Run `op` in a fresh typer state; commit that state on success, otherwise
+ * hand the failed result and state to `fallBack`.
+ */
+ def tryEither[T](op: Context => T)(fallBack: (T, TyperState) => T)(implicit ctx: Context) = {
+ val nestedCtx = ctx.fresh.setNewTyperState
+ val result = op(nestedCtx)
+ if (nestedCtx.reporter.hasErrors)
+ fallBack(result, nestedCtx.typerState)
+ else {
+ nestedCtx.typerState.commit()
+ result
+ }
+ }
+
+ /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back
+ * to errors and result of `op1`.
+ */
+ def tryAlternatively[T](op1: Context => T)(op2: Context => T)(implicit ctx: Context): T =
+ tryEither(op1) { (failedVal, failedState) =>
+ tryEither(op2) { (_, _) =>
+ // both failed: surface op1's errors and result
+ failedState.commit
+ failedVal
+ }
+ }
+
+ /** Add apply node or implicit conversions. Two strategies are tried, and the first
+ * that is successful is picked. If neither of the strategies are successful, continues with
+ * `fallBack`.
+ *
+ * 1st strategy: Try to insert `.apply` so that the result conforms to prototype `pt`.
+ * 2nd strategy: If tree is a select `qual.name`, try to insert an implicit conversion
+ * around the qualifier part `qual` so that the result conforms to the expected type
+ * with wildcard result type.
+ */
+ def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType)(fallBack: (Tree, TyperState) => Tree)(implicit ctx: Context): Tree =
+ tryEither { implicit ctx =>
+ val sel = typedSelect(untpd.Select(untpd.TypedSplice(tree), nme.apply), pt)
+ if (sel.tpe.isError) sel else adapt(sel, pt)
+ } { (failedTree, failedState) =>
+ tryInsertImplicitOnQualifier(tree, pt).getOrElse(fallBack(failedTree, failedState))
+ }
+
+ /** If this tree is a select node `qual.name`, try to insert an implicit conversion
+ * `c` around `qual` so that `c(qual).name` conforms to `pt`. Returns None if the
+ * tree is not a select, if no conversion applies, or if errors occurred.
+ */
+ def tryInsertImplicitOnQualifier(tree: Tree, pt: Type)(implicit ctx: Context): Option[Tree] = ctx.traceIndented(i"try insert impl on qualifier $tree $pt") {
+ tree match {
+ case Select(qual, name) =>
+ val qualProto = SelectionProto(name, pt, NoViewsAllowed)
+ tryEither { implicit ctx =>
+ val qual1 = adaptInterpolated(qual, qualProto, EmptyTree)
+ if ((qual eq qual1) || ctx.reporter.hasErrors) None
+ else Some(typed(cpy.Select(tree)(untpd.TypedSplice(qual1), name), pt))
+ } { (_, _) => None
+ }
+ case _ => None
+ }
+ }
+
+ /** Adapt `tree` to expected type `pt`: first interpolate undetermined type
+ * variables and simplify the tree's type, then run `adaptInterpolated`.
+ */
+ def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context): Tree = /*>|>*/ track("adapt") /*<|<*/ {
+ /*>|>*/ ctx.traceIndented(i"adapting $tree of type ${tree.tpe} to $pt", typr, show = true) /*<|<*/ {
+ if (tree.isDef) interpolateUndetVars(tree, tree.symbol)
+ else if (!tree.tpe.widen.isInstanceOf[MethodOrPoly]) interpolateUndetVars(tree, NoSymbol)
+ tree.overwriteType(tree.tpe.simplified)
+ adaptInterpolated(tree, pt, original)
+ }
+ }
+
+ /** (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
+ * (0) Convert expressions with constant types to literals (unless in interactive/scaladoc mode)
+ */
+
+ /** Perform the following adaptations of expression, pattern or type `tree` wrt to
+ * given prototype `pt`:
+ * (1) Resolve overloading
+ * (2) Apply parameterless functions
+ * (3) Apply polymorphic types to fresh instances of their type parameters and
+ * store these instances in context.undetparams,
+ * unless followed by explicit type application.
+ * (4) Do the following to unapplied methods used as values:
+ * (4.1) If the method has only implicit parameters pass implicit arguments
+ * (4.2) otherwise, if `pt` is a function type and method is not a constructor,
+ * convert to function by eta-expansion,
+ * (4.3) otherwise, if the method is nullary with a result type compatible to `pt`
+ * and it is not a constructor, apply it to ()
+ * otherwise issue an error
+ * (5) Convert constructors in a pattern as follows:
+ * (5.1) If constructor refers to a case class factory, set tree's type to the unique
+ * instance of its primary constructor that is a subtype of the expected type.
+ * (5.2) If constructor refers to an extractor, convert to application of
+ * unapply or unapplySeq method.
+ *
+ * (6) Convert all other types to TypeTree nodes.
+ * (7) When in TYPEmode but not FUNmode or HKmode, check that types are fully parameterized
+ * (7.1) In HKmode, higher-kinded types are allowed, but they must have the expected kind-arity
+ * (8) When in both EXPRmode and FUNmode, add apply method calls to values of object type.
+ * (9) If there are undetermined type variables and not POLYmode, infer expression instance
+ * Then, if tree's type is not a subtype of expected type, try the following adaptations:
+ * (10) If the expected type is Byte, Short or Char, and the expression
+ * is an integer fitting in the range of that type, convert it to that type.
+ * (11) Widen numeric literals to their expected type, if necessary
+ * (12) When in mode EXPRmode, convert E to { E; () } if expected type is scala.Unit.
+ * (13) When in mode EXPRmode, apply AnnotationChecker conversion if expected type is annotated.
+ * (14) When in mode EXPRmode, apply a view
+ * If all this fails, error
+ */
+ def adaptInterpolated(tree: Tree, pt: Type, original: untpd.Tree)(implicit ctx: Context): Tree = {
+
+ assert(pt.exists)
+
+ def methodStr = err.refStr(methPart(tree).tpe)
+
+ def missingArgs = errorTree(tree,
+ em"""missing arguments for $methodStr
+ |follow this method with `_' if you want to treat it as a partially applied function""")
+
+ // Resolve an overloaded reference against `pt`; on no match, try inserting
+ // `.apply`/`()`, otherwise report ambiguity or no-match errors.
+ def adaptOverloaded(ref: TermRef) = {
+ val altDenots = ref.denot.alternatives
+ typr.println(i"adapt overloaded $ref with alternatives ${altDenots map (_.info)}%, %")
+ val alts = altDenots map (alt =>
+ TermRef.withSigAndDenot(ref.prefix, ref.name, alt.info.signature, alt))
+ def expectedStr = err.expectedTypeStr(pt)
+ resolveOverloaded(alts, pt) match {
+ case alt :: Nil =>
+ adapt(tree.withType(alt), pt, original)
+ case Nil =>
+ def noMatches =
+ errorTree(tree,
+ em"""none of the ${err.overloadedAltsStr(altDenots)}
+ |match $expectedStr""")
+ def hasEmptyParams(denot: SingleDenotation) = denot.info.paramTypess == ListOfNil
+ pt match {
+ case pt: FunProto =>
+ tryInsertApplyOrImplicit(tree, pt)((_, _) => noMatches)
+ case _ =>
+ if (altDenots exists (_.info.paramTypess == ListOfNil))
+ typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt)
+ else
+ noMatches
+ }
+ case alts =>
+ val remainingDenots = alts map (_.denot.asInstanceOf[SingleDenotation])
+ def all = if (remainingDenots.length == 2) "both" else "all"
+ errorTree(tree,
+ em"""Ambiguous overload. The ${err.overloadedAltsStr(remainingDenots)}
+ |$all match $expectedStr""")
+ }
+ }
+
+ // Does `tp` denote a method taking exactly one non-repeated parameter
+ // (in every alternative, if overloaded)? Used to decide auto-tupling.
+ def isUnary(tp: Type): Boolean = tp match {
+ case tp: MethodicType =>
+ tp.firstParamTypes match {
+ case ptype :: Nil => !ptype.isRepeatedParam
+ case _ => false
+ }
+ case tp: TermRef =>
+ tp.denot.alternatives.forall(alt => isUnary(alt.info))
+ case _ =>
+ false
+ }
+
+ // Adapt a method-typed tree to an application prototype, auto-tupling
+ // multiple arguments for a unary method where allowed.
+ def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match {
+ case _: MethodType | _: PolyType =>
+ if (pt.args.lengthCompare(1) > 0 && isUnary(wtp) && ctx.canAutoTuple)
+ adaptInterpolated(tree, pt.tupled, original)
+ else
+ tree
+ case _ => tryInsertApplyOrImplicit(tree, pt) {
+ val more = tree match {
+ case Apply(_, _) => " more"
+ case _ => ""
+ }
+ (_, _) => errorTree(tree, em"$methodStr does not take$more parameters")
+ }
+ }
+
+ /** If `tp` is a TypeVar which is fully constrained (i.e. its upper bound `hi` conforms
+ * to its lower bound `lo`), replace `tp` by `hi`. This is necessary to
+ * keep the right constraints for some implicit search problems. The paradigmatic case
+ * is `implicitNums.scala`. Without the healing done in `followAlias`, we cannot infer
+ * implicitly[_3], where _2 is the typelevel number 3. The problem here is that if a
+ * prototype is, say, Succ[Succ[Zero]], we can infer that its argument type is Succ[Zero].
+ * But if the prototype is N? >: Succ[Succ[Zero]] <: Succ[Succ[Zero]], the same
+ * decomposition does not work - we'd get a N?#M where M is the element type name of Succ
+ * instead.
+ */
+ def followAlias(tp: Type)(implicit ctx: Context): Type = {
+ val constraint = ctx.typerState.constraint
+ def inst(tp: Type): Type = tp match {
+ case TypeBounds(lo, hi)
+ if (lo eq hi) || (hi <:< lo)(ctx.fresh.setExploreTyperState) =>
+ inst(lo)
+ case tp: PolyParam =>
+ constraint.typeVarOfParam(tp).orElse(tp)
+ case _ => tp
+ }
+ tp match {
+ case tp: TypeVar if constraint.contains(tp) => inst(constraint.entry(tp.origin))
+ case _ => tp
+ }
+ }
+
+ // Adapt a tree that is not applied to arguments: pass implicit arguments,
+ // eta-expand, apply nullary methods, or check conformance to `pt`.
+ def adaptNoArgs(wtp: Type): Tree = wtp match {
+ case wtp: ExprType =>
+ adaptInterpolated(tree.withType(wtp.resultType), pt, original)
+ case wtp: ImplicitMethodType if constrainResult(wtp, followAlias(pt)) =>
+ val tvarsToInstantiate = tvarsInParams(tree)
+ wtp.paramTypes.foreach(instantiateSelected(_, tvarsToInstantiate))
+ val constr = ctx.typerState.constraint
+ def addImplicitArgs(implicit ctx: Context) = {
+ val errors = new mutable.ListBuffer[() => String]
+ def implicitArgError(msg: => String) = {
+ errors += (() => msg)
+ EmptyTree
+ }
+ def issueErrors() = {
+ for (err <- errors) ctx.error(err(), tree.pos.endPos)
+ tree.withType(wtp.resultType)
+ }
+ val args = (wtp.paramNames, wtp.paramTypes).zipped map { (pname, formal) =>
+ def implicitArgError(msg: String => String) =
+ errors += (() => msg(em"parameter $pname of $methodStr"))
+ inferImplicitArg(formal, implicitArgError, tree.pos.endPos)
+ }
+ if (errors.nonEmpty) {
+ // If there are several arguments, some arguments might already
+ // have influenced the context, binding variables, but later ones
+ // might fail. In that case the constraint needs to be reset.
+ ctx.typerState.constraint = constr
+
+ // If method has default params, fall back to regular application
+ // where all inferred implicits are passed as named args.
+ if (tree.symbol.hasDefaultParams) {
+ val namedArgs = (wtp.paramNames, args).zipped.flatMap { (pname, arg) =>
+ arg match {
+ case EmptyTree => Nil
+ case _ => untpd.NamedArg(pname, untpd.TypedSplice(arg)) :: Nil
+ }
+ }
+ tryEither { implicit ctx =>
+ typed(untpd.Apply(untpd.TypedSplice(tree), namedArgs), pt)
+ } { (_, _) =>
+ issueErrors()
+ }
+ } else issueErrors()
+ }
+ else adapt(tpd.Apply(tree, args), pt)
+ }
+ if ((pt eq WildcardType) || original.isEmpty) addImplicitArgs(argCtx(tree))
+ else
+ ctx.typerState.tryWithFallback(addImplicitArgs(argCtx(tree))) {
+ adapt(typed(original, WildcardType), pt, EmptyTree)
+ }
+ case wtp: MethodType if !pt.isInstanceOf[SingletonType] =>
+ val arity =
+ if (defn.isFunctionType(pt))
+ if (!isFullyDefined(pt, ForceDegree.none) && isFullyDefined(wtp, ForceDegree.none))
+ // if method type is fully defined, but expected type is not,
+ // prioritize method parameter types as parameter types of the eta-expanded closure
+ 0
+ else defn.functionArity(pt)
+ else if (pt eq AnyFunctionProto) wtp.paramTypes.length
+ else -1
+ if (arity >= 0 && !tree.symbol.isConstructor)
+ typed(etaExpand(tree, wtp, arity), pt)
+ else if (wtp.paramTypes.isEmpty)
+ adaptInterpolated(tpd.Apply(tree, Nil), pt, EmptyTree)
+ else if (wtp.isImplicit)
+ err.typeMismatch(tree, pt)
+ else
+ missingArgs
+ case _ =>
+ ctx.typeComparer.GADTused = false
+ if (ctx.mode is Mode.Pattern) {
+ tree match {
+ case _: RefTree | _: Literal if !isVarPattern(tree) =>
+ checkCanEqual(pt, wtp, tree.pos)(ctx.retractMode(Mode.Pattern))
+ case _ =>
+ }
+ tree
+ }
+ else if (tree.tpe <:< pt) {
+ if (pt.hasAnnotation(defn.InlineParamAnnot))
+ checkInlineConformant(tree, "argument to inline parameter")
+ if (Inliner.hasBodyToInline(tree.symbol) &&
+ !ctx.owner.ownersIterator.exists(_.isInlineMethod) &&
+ !ctx.settings.YnoInline.value &&
+ !ctx.isAfterTyper)
+ adapt(Inliner.inlineCall(tree, pt), pt)
+ else if (ctx.typeComparer.GADTused && pt.isValueType)
+ // Insert an explicit cast, so that -Ycheck in later phases succeeds.
+ // I suspect, but am not 100% sure that this might affect inferred types,
+ // if the expected type is a supertype of the GADT bound. It would be good to come
+ // up with a test case for this.
+ tree.asInstance(pt)
+ else
+ tree
+ }
+ else if (wtp.isInstanceOf[MethodType]) missingArgs
+ else {
+ typr.println(i"adapt to subtype ${tree.tpe} !<:< $pt")
+ //typr.println(TypeComparer.explained(implicit ctx => tree.tpe <:< pt))
+ adaptToSubType(wtp)
+ }
+ }
+ /** Adapt an expression of constant type to a different constant type `tpe`. */
+ def adaptConstant(tree: Tree, tpe: ConstantType): Tree = {
+ def lit = Literal(tpe.value).withPos(tree.pos)
+ tree match {
+ case Literal(c) => lit
+ case tree @ Block(stats, expr) => tpd.cpy.Block(tree)(stats, adaptConstant(expr, tpe))
+ case tree =>
+ if (isIdempotentExpr(tree)) lit // See discussion in phase Literalize why we demand isIdempotentExpr
+ else Block(tree :: Nil, lit)
+ }
+ }
+
+ // Last-resort adaptations when the tree's type does not conform to `pt`:
+ // constant folding, Unit discarding, SAM conversion, implicit views.
+ def adaptToSubType(wtp: Type): Tree = {
+ // try converting a constant to the target type
+ val folded = ConstFold(tree, pt)
+ if (folded ne tree) return adaptConstant(folded, folded.tpe.asInstanceOf[ConstantType])
+ // drop type if prototype is Unit
+ if (pt isRef defn.UnitClass)
+ // local adaptation makes sure every adapted tree conforms to its pt
+ // so will take the code path that decides on inlining
+ return tpd.Block(adapt(tree, WildcardType) :: Nil, Literal(Constant(())))
+ // convert function literal to SAM closure
+ tree match {
+ case Closure(Nil, id @ Ident(nme.ANON_FUN), _)
+ if defn.isFunctionType(wtp) && !defn.isFunctionType(pt) =>
+ pt match {
+ case SAMType(meth)
+ if wtp <:< meth.info.toFunctionType() =>
+ // was ... && isFullyDefined(pt, ForceDegree.noBottom)
+ // but this prevents case blocks from implementing polymorphic partial functions,
+ // since we do not know the result parameter a priori. Have to wait until the
+ // body is typechecked.
+ return cpy.Closure(tree)(Nil, id, TypeTree(pt)).withType(pt)
+ case _ =>
+ }
+ case _ =>
+ }
+ // try an implicit conversion
+ inferView(tree, pt) match {
+ case SearchSuccess(inferred, _, _) =>
+ adapt(inferred, pt)
+ case failure: SearchFailure =>
+ if (pt.isInstanceOf[ProtoType] && !failure.isInstanceOf[AmbiguousImplicits]) tree
+ else err.typeMismatch(tree, pt, failure)
+ }
+ }
+
+ // Adapt a tree used as a type: eta-expand type constructors where needed,
+ // then check conformance to `pt`.
+ def adaptType(tp: Type): Tree = {
+ val tree1 =
+ if ((pt eq AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty) tree
+ else tree.withType(tree.tpe.EtaExpand(tp.typeParamSymbols))
+ if ((ctx.mode is Mode.Pattern) || tree1.tpe <:< pt) tree1
+ else err.typeMismatch(tree1, pt)
+ }
+
+ tree match {
+ case _: MemberDef | _: PackageDef | _: Import | _: WithoutTypeOrPos[_] => tree
+ case _ => tree.tpe.widen match {
+ case _: ErrorType =>
+ tree
+ case ref: TermRef =>
+ pt match {
+ case pt: FunProto
+ if pt.args.lengthCompare(1) > 0 && isUnary(ref) && ctx.canAutoTuple =>
+ adaptInterpolated(tree, pt.tupled, original)
+ case _ =>
+ adaptOverloaded(ref)
+ }
+ case poly: PolyType if !(ctx.mode is Mode.Type) =>
+ if (pt.isInstanceOf[PolyProto]) tree
+ else {
+ var typeArgs = tree match {
+ case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo
+ case _ => Nil
+ }
+ if (typeArgs.isEmpty) typeArgs = constrained(poly, tree)._2
+ convertNewGenericArray(
+ adaptInterpolated(tree.appliedToTypes(typeArgs), pt, original))
+ }
+ case wtp =>
+ pt match {
+ case pt: FunProto =>
+ adaptToArgs(wtp, pt)
+ case pt: PolyProto =>
+ tryInsertApplyOrImplicit(tree, pt) {
+ (_, _) => tree // error will be reported in typedTypeApply
+ }
+ case _ =>
+ if (ctx.mode is Mode.Type) adaptType(tree.tpe)
+ else adaptNoArgs(wtp)
+ }
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
new file mode 100644
index 000000000..d5dd5a024
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
@@ -0,0 +1,148 @@
+package dotty.tools.dotc
+package typer
+
+import dotty.tools.dotc.ast.{ Trees, tpd }
+import core._
+import Types._, Contexts._, Flags._, Symbols._, Annotations._, Trees._, NameOps._
+import Decorators._
+import Variances._
+import util.Positions._
+import rewrite.Rewrites.patch
+import config.Printers.variances
+
+/** Provides `check` method to check that all top-level definitions
+ * in tree are variance correct. Does not recurse inside methods.
+ * The method should be invoked once for each Template.
+ */
+object VarianceChecker {
+ case class VarianceError(tvar: Symbol, required: Variance)
+ def check(tree: tpd.Tree)(implicit ctx: Context) =
+ new VarianceChecker()(ctx).Traverser.traverse(tree)
+}
+
+class VarianceChecker()(implicit ctx: Context) {
+ import VarianceChecker._
+ import tpd._
+
+ private object Validator extends TypeAccumulator[Option[VarianceError]] {
+ private var base: Symbol = _
+
+ /** Is no variance checking needed within definition of `base`? */
+ def ignoreVarianceIn(base: Symbol): Boolean = (
+ base.isTerm
+ || base.is(Package)
+ || base.is(Local)
+ )
+
+ /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`.
+ * The search proceeds from `base` to the owner of `tvar`.
+ * Initially the state is covariant, but it might change along the search.
+ */
+ def relativeVariance(tvar: Symbol, base: Symbol, v: Variance = Covariant): Variance = /*ctx.traceIndented(i"relative variance of $tvar wrt $base, so far: $v")*/ {
+ if (base == tvar.owner) v
+ else if ((base is Param) && base.owner.isTerm)
+ relativeVariance(tvar, paramOuter(base.owner), flip(v))
+ else if (ignoreVarianceIn(base.owner)) Bivariant
+ else if (base.isAliasType) relativeVariance(tvar, base.owner, Invariant)
+ else relativeVariance(tvar, base.owner, v)
+ }
+
+ /** The next level to take into account when determining the
+ * relative variance with a method parameter as base. The method
+ * is always skipped. If the method is a constructor, we also skip
+ * its class owner, because constructors are not checked for variance
+ * relative to the type parameters of their own class. On the other
+ * hand constructors do count for checking the variance of type parameters
+ * of enclosing classes. I believe the Scala 2 rules are too lenient in
+ * that respect.
+ */
+ private def paramOuter(meth: Symbol) =
+ if (meth.isConstructor) meth.owner.owner else meth.owner
+
+ /** Check variance of abstract type `tvar` when referred from `base`. */
+ private def checkVarianceOfSymbol(tvar: Symbol): Option[VarianceError] = {
+ val relative = relativeVariance(tvar, base)
+ if (relative == Bivariant || tvar.is(BaseTypeArg)) None
+ else {
+ val required = compose(relative, this.variance)
+ def tvar_s = s"$tvar (${varianceString(tvar.flags)} ${tvar.showLocated})"
+ def base_s = s"$base in ${base.owner}" + (if (base.owner.isClass) "" else " in " + base.owner.enclosingClass)
+ ctx.log(s"verifying $tvar_s is ${varianceString(required)} at $base_s")
+ ctx.log(s"relative variance: ${varianceString(relative)}")
+ ctx.log(s"current variance: ${this.variance}")
+ ctx.log(s"owner chain: ${base.ownersIterator.toList}")
+ if (tvar is required) None
+ else Some(VarianceError(tvar, required))
+ }
+ }
+
+ /** For PolyTypes, type parameters are skipped because they are defined
+ * explicitly (their TypeDefs will be passed here.) For MethodTypes, the
+ * same is true of the parameters (ValDefs).
+ */
+ def apply(status: Option[VarianceError], tp: Type): Option[VarianceError] = ctx.traceIndented(s"variance checking $tp of $base at $variance", variances) {
+ if (status.isDefined) status
+ else tp match {
+ case tp: TypeRef =>
+ val sym = tp.symbol
+ if (sym.variance != 0 && base.isContainedIn(sym.owner)) checkVarianceOfSymbol(sym)
+ else if (sym.isAliasType) this(status, sym.info.bounds.hi)
+ else foldOver(status, tp)
+ case tp: MethodType =>
+ this(status, tp.resultType) // params will be checked in their ValDef nodes.
+ case tp: PolyType =>
+ this(status, tp.resultType) // params will be checked in their TypeDef nodes.
+ case AnnotatedType(_, annot) if annot.symbol == defn.UncheckedVarianceAnnot =>
+ status
+ //case tp: ClassInfo =>
+ // ??? not clear what to do here yet. presumably, it's all checked at local typedefs
+ case _ =>
+ foldOver(status, tp)
+ }
+ }
+
+ def validateDefinition(base: Symbol): Option[VarianceError] = {
+ val saved = this.base
+ this.base = base
+ try apply(None, base.info)
+ finally this.base = saved
+ }
+ }
+
+ private object Traverser extends TreeTraverser {
+ def checkVariance(sym: Symbol, pos: Position) = Validator.validateDefinition(sym) match {
+ case Some(VarianceError(tvar, required)) =>
+ def msg = i"${varianceString(tvar.flags)} $tvar occurs in ${varianceString(required)} position in type ${sym.info} of $sym"
+ if (ctx.scala2Mode && sym.owner.isConstructor) {
+ ctx.migrationWarning(s"According to new variance rules, this is no longer accepted; need to annotate with @uncheckedVariance:\n$msg", sym.pos)
+ patch(Position(pos.end), " @scala.annotation.unchecked.uncheckedVariance") // TODO use an import or shorten if possible
+ }
+ else ctx.error(msg, sym.pos)
+ case None =>
+ }
+
+ override def traverse(tree: Tree)(implicit ctx: Context) = {
+ def sym = tree.symbol
+ // No variance check for private/protected[this] methods/values.
+ def skip =
+ !sym.exists ||
+ sym.is(Local) || // !!! watch out for protected local!
+ sym.is(TypeParam) && sym.owner.isClass // already taken care of in primary constructor of class
+ tree match {
+ case defn: MemberDef if skip =>
+ ctx.debuglog(s"Skipping variance check of ${sym.showDcl}")
+ case tree: TypeDef =>
+ checkVariance(sym, tree.pos)
+ case tree: ValDef =>
+ checkVariance(sym, tree.pos)
+ case DefDef(_, tparams, vparamss, _, _) =>
+ checkVariance(sym, tree.pos)
+ tparams foreach traverse
+ vparamss foreach (_ foreach traverse)
+ case Template(_, _, _, body) =>
+ traverseChildren(tree)
+ case _ =>
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/Variances.scala b/compiler/src/dotty/tools/dotc/typer/Variances.scala
new file mode 100644
index 000000000..92bd9fd74
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/Variances.scala
@@ -0,0 +1,116 @@
+package dotty.tools.dotc
+package typer
+
+import dotty.tools.dotc.ast.{Trees, tpd}
+import core._
+import Types._, Contexts._, Flags._, Symbols._, Annotations._, Trees._
+import Decorators._
+
+object Variances {
+ import tpd._
+
+ type Variance = FlagSet
+ val Bivariant = VarianceFlags
+ val Invariant = EmptyFlags
+
+ /** Flip between covariant and contravariant */
+ def flip(v: Variance): Variance = {
+ if (v == Covariant) Contravariant
+ else if (v == Contravariant) Covariant
+ else v
+ }
+
+ /** Map everything below Bivariant to Invariant */
+ def cut(v: Variance): Variance =
+ if (v == Bivariant) v else Invariant
+
+ def compose(v: Variance, boundsVariance: Int) =
+ if (boundsVariance == 1) v
+ else if (boundsVariance == -1) flip(v)
+ else cut(v)
+
+ /** Compute variance of type parameter `tparam' in types of all symbols `sym'. */
+ def varianceInSyms(syms: List[Symbol])(tparam: Symbol)(implicit ctx: Context): Variance =
+ (Bivariant /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
+
+ /** Compute variance of type parameter `tparam' in type of symbol `sym'. */
+ def varianceInSym(sym: Symbol)(tparam: Symbol)(implicit ctx: Context): Variance =
+ if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
+ else varianceInType(sym.info)(tparam)
+
+ /** Compute variance of type parameter `tparam' in all types `tps'. */
+ def varianceInTypes(tps: List[Type])(tparam: Symbol)(implicit ctx: Context): Variance =
+ (Bivariant /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
+
+ /** Compute variance of type parameter `tparam' in all type arguments
+ * <code>tps</code> which correspond to formal type parameters `tparams1'.
+ */
+ def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol)(implicit ctx: Context): Variance = {
+ var v: Variance = Bivariant;
+ for ((tp, tparam1) <- tps zip tparams1) {
+ val v1 = varianceInType(tp)(tparam)
+ v = v & (if (tparam1.is(Covariant)) v1
+ else if (tparam1.is(Contravariant)) flip(v1)
+ else cut(v1))
+ }
+ v
+ }
+
+ /** Compute variance of type parameter `tparam' in all type annotations `annots'. */
+ def varianceInAnnots(annots: List[Annotation])(tparam: Symbol)(implicit ctx: Context): Variance = {
+ (Bivariant /: annots) ((v, annot) => v & varianceInAnnot(annot)(tparam))
+ }
+
+ /** Compute variance of type parameter `tparam' in type annotation `annot'. */
+ def varianceInAnnot(annot: Annotation)(tparam: Symbol)(implicit ctx: Context): Variance = {
+ varianceInType(annot.tree.tpe)(tparam)
+ }
+
+ /** Compute variance of type parameter <code>tparam</code> in type <code>tp</code>. */
+ def varianceInType(tp: Type)(tparam: Symbol)(implicit ctx: Context): Variance = tp match {
+ case TermRef(pre, _) =>
+ varianceInType(pre)(tparam)
+ case tp @ TypeRef(pre, _) =>
+ if (tp.symbol == tparam) Covariant else varianceInType(pre)(tparam)
+ case tp @ TypeBounds(lo, hi) =>
+ if (lo eq hi) compose(varianceInType(hi)(tparam), tp.variance)
+ else flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
+ case tp @ RefinedType(parent, _, rinfo) =>
+ varianceInType(parent)(tparam) & varianceInType(rinfo)(tparam)
+ case tp: RecType =>
+ varianceInType(tp.parent)(tparam)
+ case tp @ MethodType(_, paramTypes) =>
+ flip(varianceInTypes(paramTypes)(tparam)) & varianceInType(tp.resultType)(tparam)
+ case ExprType(restpe) =>
+ varianceInType(restpe)(tparam)
+ case tp @ HKApply(tycon, args) =>
+ def varianceInArgs(v: Variance, args: List[Type], tparams: List[TypeParamInfo]): Variance =
+ args match {
+ case arg :: args1 =>
+ varianceInArgs(
+ v & compose(varianceInType(arg)(tparam), tparams.head.paramVariance),
+ args1, tparams.tail)
+ case nil =>
+ v
+ }
+ varianceInArgs(varianceInType(tycon)(tparam), args, tycon.typeParams)
+ case tp: PolyType =>
+ flip(varianceInTypes(tp.paramBounds)(tparam)) & varianceInType(tp.resultType)(tparam)
+ case AnnotatedType(tp, annot) =>
+ varianceInType(tp)(tparam) & varianceInAnnot(annot)(tparam)
+ case tp: AndOrType =>
+ varianceInType(tp.tp1)(tparam) & varianceInType(tp.tp2)(tparam)
+ case _ =>
+ Bivariant
+ }
+
+ def varianceString(v: Variance) =
+ if (v is Covariant) "covariant"
+ else if (v is Contravariant) "contravariant"
+ else "invariant"
+
+ def varianceString(v: Int) =
+ if (v > 0) "+"
+ else if (v < 0) "-"
+ else ""
+}
diff --git a/compiler/src/dotty/tools/dotc/util/Attachment.scala b/compiler/src/dotty/tools/dotc/util/Attachment.scala
new file mode 100644
index 000000000..20facfd97
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Attachment.scala
@@ -0,0 +1,96 @@
+package dotty.tools.dotc.util
+
+/** A class inheriting from Attachment.Container supports
+ * adding, removing and lookup of attachments. Attachments are typed key/value pairs.
+ */
+object Attachment {
+ import Property.Key
+
+ /** An implementation trait for attachments.
+ * Clients should inherit from Container instead.
+ */
+ trait LinkSource {
+ private[Attachment] var next: Link[_]
+
+ /** Optionally get attachment corresponding to `key` */
+ final def getAttachment[V](key: Key[V]): Option[V] = {
+ val nx = next
+ if (nx == null) None
+ else if (nx.key eq key) Some(nx.value.asInstanceOf[V])
+ else nx.getAttachment[V](key)
+ }
+
+ /** The attachment corresponding to `key`.
+ * @throws NoSuchElementException if no attachment with key exists
+ */
+ final def attachment[V](key: Key[V]): V = {
+ val nx = next
+ if (nx == null) throw new NoSuchElementException
+ else if (nx.key eq key) nx.value.asInstanceOf[V]
+ else nx.attachment(key)
+ }
+
+ /** The attachment corresponding to `key`, or `default`
+ * if no attachment with `key` exists.
+ */
+ final def attachmentOrElse[V](key: Key[V], default: V): V = {
+ val nx = next
+ if (nx == null) default
+ else if (nx.key eq key) nx.value.asInstanceOf[V]
+ else nx.attachmentOrElse(key, default)
+ }
+
+ /** Add attachment with given `key` and `value`.
+ * @return Optionally, the old attachment with given `key` if one existed before.
+ * The new attachment is added at the position of the old one, or at the end
+ * if no attachment with same `key` existed.
+ */
+ final def putAttachment[V](key: Key[V], value: V): Option[V] = {
+ val nx = next
+ if (nx == null) {
+ next = new Link(key, value, null)
+ None
+ }
+ else if (nx.key eq key) {
+ next = new Link(key, value, nx.next)
+ Some(nx.value.asInstanceOf[V])
+ }
+ else nx.putAttachment(key, value)
+ }
+
+ /** Remove attachment with given `key`, if it exists.
+ * @return Optionally, the removed attachment with given `key` if one existed before.
+ */
+ final def removeAttachment[V](key: Key[V]): Option[V] = {
+ val nx = next
+ if (nx == null)
+ None
+ else if (nx.key eq key) {
+ next = nx.next
+ Some(nx.value.asInstanceOf[V])
+ }
+ else nx.removeAttachment(key)
+ }
+
+ /** The list of all keys and values attached to this container. */
+ final def allAttachments: List[(Key[_], Any)] = {
+ val nx = next
+ if (nx == null) Nil else (nx.key, nx.value) :: nx.allAttachments
+ }
+ }
+
+ /** A private, concrete implementation class linking attachments.
+ */
+ private[Attachment] class Link[+V](val key: Key[V], val value: V, var next: Link[_])
+ extends LinkSource
+
+ /** A trait for objects that can contain attachments */
+ trait Container extends LinkSource {
+ private[Attachment] var next: Link[_] = null
+
+ final def pushAttachment[V](key: Key[V], value: V): Unit = {
+ assert(!getAttachment(key).isDefined, s"duplicate attachment for key $key")
+ next = new Link(key, value, next)
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala
new file mode 100644
index 000000000..bae3b4732
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Chars.scala
@@ -0,0 +1,96 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools.dotc
+package util
+
+import scala.annotation.switch
+import java.lang.{ Character => JCharacter }
+import java.lang.{Character => JCharacter}
+import java.lang.Character.LETTER_NUMBER
+import java.lang.Character.LOWERCASE_LETTER
+import java.lang.Character.OTHER_LETTER
+import java.lang.Character.TITLECASE_LETTER
+import java.lang.Character.UPPERCASE_LETTER
+
+/** Contains constants and classifier methods for characters */
+object Chars {
+
+ final val LF = '\u000A'
+ final val FF = '\u000C'
+ final val CR = '\u000D'
+ final val SU = '\u001A'
+
+ /** Convert a character digit to an Int according to given base,
+ * -1 if no success
+ */
+ def digit2int(ch: Char, base: Int): Int = {
+ val num = (
+ if (ch <= '9') ch - '0'
+ else if ('a' <= ch && ch <= 'z') ch - 'a' + 10
+ else if ('A' <= ch && ch <= 'Z') ch - 'A' + 10
+ else -1
+ )
+ if (0 <= num && num < base) num else -1
+ }
+ /** Buffer for creating '\ u XXXX' strings. */
+ private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0)
+
+ /** Convert a character to a backslash-u escape */
+ def char2uescape(c: Char): String = {
+ @inline def hexChar(ch: Int): Char =
+ (( if (ch < 10) '0' else 'A' - 10 ) + ch).toChar
+
+ char2uescapeArray(2) = hexChar((c >> 12) )
+ char2uescapeArray(3) = hexChar((c >> 8) % 16)
+ char2uescapeArray(4) = hexChar((c >> 4) % 16)
+ char2uescapeArray(5) = hexChar((c ) % 16)
+
+ new String(char2uescapeArray)
+ }
+
+ /** Is character a line break? */
+ def isLineBreakChar(c: Char) = (c: @switch) match {
+ case LF|FF|CR|SU => true
+ case _ => false
+ }
+
+ /** Is character a whitespace character (but not a new line)? */
+ def isWhitespace(c: Char) =
+ c == ' ' || c == '\t' || c == CR
+
+ /** Can character form part of a doc comment variable $xxx? */
+ def isVarPart(c: Char) =
+ '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
+
+ /** Can character start an alphanumeric Scala identifier? */
+ def isIdentifierStart(c: Char): Boolean =
+ (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
+
+ /** Can character form part of an alphanumeric Scala identifier? */
+ def isIdentifierPart(c: Char) =
+ (c == '$') || Character.isUnicodeIdentifierPart(c)
+
+ /** Is character a math or other symbol in Unicode? */
+ def isSpecial(c: Char) = {
+ val chtp = Character.getType(c)
+ chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
+ }
+
+ private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_'
+ private final val letterGroups = {
+ import JCharacter._
+ Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER)
+ }
+ def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch)
+
+ /** Can character form part of a Scala operator name? */
+ def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '/' | '\\' => true
+ case c => isSpecial(c)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
new file mode 100644
index 000000000..cc790d683
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
@@ -0,0 +1,239 @@
+/*
+ * Port of DocStrings.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+package dotty.tools.dotc.util
+
+/** The comment parsing in `dotc` is used by both the comment cooking and the
+ * dottydoc tool.
+ *
+ * The comment cooking is used to expand comments with `@inheritdoc` and
+ * `@define` annotations. The rest of the comment is untouched and later
+ * handled by dottydoc.
+ */
+object CommentParsing {
+ import scala.reflect.internal.Chars._
+
+ /** Returns index of string `str` following `start` skipping longest
+ * sequence of whitespace characters (but no newlines)
+ */
+ def skipWhitespace(str: String, start: Int): Int =
+ if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping
+ * sequence of identifier characters.
+ */
+ def skipIdent(str: String, start: Int): Int =
+ if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping a tag:
+ * an `@` character followed by identifier characters.
+ */
+ def skipTag(str: String, start: Int): Int =
+ if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
+ else start
+
+
+ /** Returns index of string `str` after `start` skipping longest
+ * sequence of space and tab characters, possibly also containing
+ * a single `*` character or the `/``**` sequence.
+ * @pre start == str.length || str(start) == `\n`
+ */
+ def skipLineLead(str: String, start: Int): Int =
+ if (start == str.length) start
+ else {
+ val idx = skipWhitespace(str, start + 1)
+ if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
+ else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
+ skipWhitespace(str, idx + 3)
+ else idx
+ }
+
+ /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`.
+ */
+ def skipToEol(str: String, start: Int): Int =
+ if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
+ else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
+ else start
+
+ /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
+ * which satisfies predicate `p`.
+ */
+ def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
+ val idx = skipLineLead(str, skipToEol(str, start))
+ if (idx < str.length && !p(idx)) findNext(str, idx)(p)
+ else idx
+ }
+
+ /** Return all indices following `start` that start a line (i.e. after skipLineLead)
+ * and satisfy predicate `p`.
+ */
+ def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
+ val idx = findNext(str, start)(p)
+ if (idx == str.length) List()
+ else idx :: findAll(str, idx)(p)
+ }
+
+ /** Produces a string index, which is a list of `sections`, i.e
+ * pairs of start/end positions of all tagged sections in the string.
+ * Every section starts with an at sign and extends to the next at sign,
+ * or to the end of the comment string, but excluding the final two
+ * characters which terminate the comment.
+ *
+ * Also take usecases into account - they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own
+ */
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
+ var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
+ indices = mergeUsecaseSections(str, indices)
+ indices = mergeInheritdocSections(str, indices)
+
+ indices match {
+ case List() => List()
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ }
+ }
+
+ /**
+ * Merge sections following an usecase into the usecase comment, so they
+ * can override the parent symbol's sections
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.indexWhere(str.startsWith("@usecase", _)) match {
+ case firstUCIndex if firstUCIndex != -1 =>
+ val commentSections = idxs.take(firstUCIndex)
+ val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
+ commentSections ::: usecaseSections
+ case _ =>
+ idxs
+ }
+ }
+
+ /**
+ * Merge the inheritdoc sections, as they never make sense on their own
+ */
+ def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
+ idxs.filterNot(str.startsWith("@inheritdoc", _))
+
+ /** Does interval `iv` start with given `tag`?
+ */
+ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
+ startsWithTag(str, section._1, tag)
+
+ def startsWithTag(str: String, start: Int, tag: String): Boolean =
+ str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
+
+ /** The first start tag of a list of tag intervals,
+ * or the end of the whole comment string - 2 if list is empty
+ */
+ def startTag(str: String, sections: List[(Int, Int)]) = sections match {
+ case Nil => str.length - 2
+ case (start, _) :: _ => start
+ }
+
+ /** A map from parameter names to start/end indices describing all parameter
+ * sections in `str` tagged with `tag`, where `sections` is the index of `str`.
+ */
+ def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections if startsWithTag(str, section, tag)) yield {
+ val start = skipWhitespace(str, section._1 + tag.length)
+ str.substring(start, skipIdent(str, start)) -> section
+ }
+ }
+
+ /** Optionally start and end index of group section in `str`, or `None`
+ * if `str` does not have a @group. */
+ def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@group"))
+
+
+ /** Optionally start and end index of return section in `str`, or `None`
+ * if `str` does not have a @return.
+ */
+ def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@return"))
+
+ /** Extracts variable name from a string, stripping any pair of surrounding braces */
+ def variableName(str: String): String =
+ if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
+ str.substring(1, str.length - 1)
+ else
+ str
+
+ /** Returns index following variable, or start index if no variable was recognized
+ */
+ def skipVariable(str: String, start: Int): Int = {
+ var idx = start
+ if (idx < str.length && (str charAt idx) == '{') {
+ do idx += 1
+ while (idx < str.length && (str charAt idx) != '}')
+ if (idx < str.length) idx + 1 else start
+ } else {
+ while (idx < str.length && isVarPart(str charAt idx))
+ idx += 1
+ idx
+ }
+ }
+
+ /** A map from the section tag to section parameters */
+ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections) yield
+ extractSectionTag(str, section) -> section
+ }
+
+ /** Extract the section tag, treating the section tag as an identifier */
+ def extractSectionTag(str: String, section: (Int, Int)): String =
+ str.substring(section._1, skipTag(str, section._1))
+
+ /** Extract the section parameter */
+ def extractSectionParam(str: String, section: (Int, Int)): String = {
+ val (beg, _) = section
+ assert(str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+
+ val start = skipWhitespace(str, skipTag(str, beg))
+ val finish = skipIdent(str, start)
+
+ str.substring(start, finish)
+ }
+
+ /** Extract the section text, except for the tag and comment newlines */
+ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
+ val (beg, end) = section
+ if (str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+ (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
+ else
+ (skipWhitespace(str, skipTag(str, beg)), end)
+ }
+
+ /** Cleanup section text */
+ def cleanupSectionText(str: String) = {
+ var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
+ while (result.endsWith("\n"))
+ result = result.substring(0, str.length - 1)
+ result
+ }
+
+
+ def removeSections(raw: String, xs: String*): String = {
+ val sections = tagIndex(raw)
+
+ val toBeRemoved = for {
+ section <- xs
+ lines = sections filter { startsWithTag(raw, _, section) }
+ } yield lines
+
+ val end = startTag(raw, toBeRemoved.flatten.sortBy(_._1).toList)
+
+ if (end == raw.length - 2) raw else raw.substring(0, end) + "*/"
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala
new file mode 100644
index 000000000..b55aee719
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala
@@ -0,0 +1,174 @@
+package dotty.tools.dotc.util
+
+import scala.annotation.tailrec
+import scala.collection.mutable
+
+object DiffUtil {
+
+ private final val ANSI_DEFAULT = "\u001B[0m"
+ private final val ANSI_RED = "\u001B[31m"
+ private final val ANSI_GREEN = "\u001B[32m"
+
+ private final val DELETION_COLOR = ANSI_RED
+ private final val ADDITION_COLOR = ANSI_GREEN
+
+ @tailrec private def splitTokens(str: String, acc: List[String] = Nil): List[String] = {
+ if (str == "") {
+ acc.reverse
+ } else {
+ val head = str.charAt(0)
+ val (token, rest) = if (Character.isAlphabetic(head) || Character.isDigit(head)) {
+ str.span(c => Character.isAlphabetic(c) || Character.isDigit(c))
+ } else if (Character.isMirrored(head) || Character.isWhitespace(head)) {
+ str.splitAt(1)
+ } else {
+ str.span { c =>
+ !Character.isAlphabetic(c) && !Character.isDigit(c) &&
+ !Character.isMirrored(c) && !Character.isWhitespace(c)
+ }
+ }
+ splitTokens(rest, token :: acc)
+ }
+ }
+
+
+ /** @return a tuple of the (found, expected, changedPercentage) diffs as strings */
+ def mkColoredTypeDiff(found: String, expected: String): (String, String, Double) = {
+ var totalChange = 0
+ val foundTokens = splitTokens(found, Nil).toArray
+ val expectedTokens = splitTokens(expected, Nil).toArray
+
+ val diffExp = hirschberg(foundTokens, expectedTokens)
+ val diffAct = hirschberg(expectedTokens, foundTokens)
+
+ val exp = diffExp.collect {
+ case Unmodified(str) => str
+ case Inserted(str) =>
+ totalChange += str.length
+ ADDITION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+
+ val fnd = diffAct.collect {
+ case Unmodified(str) => str
+ case Inserted(str) =>
+ totalChange += str.length
+ DELETION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+
+ (fnd, exp, totalChange.toDouble / (expected.length + found.length))
+ }
+
+ def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = {
+
+ val tokens = splitTokens(code, Nil).toArray
+ val lastTokens = splitTokens(lastCode, Nil).toArray
+
+ val diff = hirschberg(lastTokens, tokens)
+
+ diff.collect {
+ case Unmodified(str) => str
+ case Inserted(str) => ADDITION_COLOR + str + ANSI_DEFAULT
+ case Modified(old, str) if printDiffDel => DELETION_COLOR + old + ADDITION_COLOR + str + ANSI_DEFAULT
+ case Modified(_, str) => ADDITION_COLOR + str + ANSI_DEFAULT
+ case Deleted(str) if printDiffDel => DELETION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+ }
+
+ private sealed trait Patch
+ private final case class Unmodified(str: String) extends Patch
+ private final case class Modified(original: String, str: String) extends Patch
+ private final case class Deleted(str: String) extends Patch
+ private final case class Inserted(str: String) extends Patch
+
+ private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = {
+ def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
+ if (x.isEmpty) {
+ builder += Inserted(y.mkString)
+ } else if (y.isEmpty) {
+ builder += Deleted(x.mkString)
+ } else if (x.length == 1 || y.length == 1) {
+ needlemanWunsch(x, y, builder)
+ } else {
+ val xlen = x.length
+ val xmid = xlen / 2
+ val ylen = y.length
+
+ val (x1, x2) = x.splitAt(xmid)
+ val leftScore = nwScore(x1, y)
+ val rightScore = nwScore(x2.reverse, y.reverse)
+ val scoreSum = (leftScore zip rightScore.reverse).map {
+ case (left, right) => left + right
+ }
+ val max = scoreSum.max
+ val ymid = scoreSum.indexOf(max)
+
+ val (y1, y2) = y.splitAt(ymid)
+ build(x1, y1, builder)
+ build(x2, y2, builder)
+ }
+ }
+ val builder = Array.newBuilder[Patch]
+ build(a, b, builder)
+ builder.result()
+ }
+
+ private def nwScore(x: Array[String], y: Array[String]): Array[Int] = {
+ def ins(s: String) = -2
+ def del(s: String) = -2
+ def sub(s1: String, s2: String) = if (s1 == s2) 2 else -1
+
+ val score = Array.fill(x.length + 1, y.length + 1)(0)
+ for (j <- 1 to y.length)
+ score(0)(j) = score(0)(j - 1) + ins(y(j - 1))
+ for (i <- 1 to x.length) {
+ score(i)(0) = score(i - 1)(0) + del(x(i - 1))
+ for (j <- 1 to y.length) {
+ val scoreSub = score(i - 1)(j - 1) + sub(x(i - 1), y(j - 1))
+ val scoreDel = score(i - 1)(j) + del(x(i - 1))
+ val scoreIns = score(i)(j - 1) + ins(y(j - 1))
+ score(i)(j) = scoreSub max scoreDel max scoreIns
+ }
+ }
+ Array.tabulate(y.length + 1)(j => score(x.length)(j))
+ }
+
+ private def needlemanWunsch(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
+ def similarity(a: String, b: String) = if (a == b) 2 else -1
+ val d = 1
+ val score = Array.tabulate(x.length + 1, y.length + 1) { (i, j) =>
+ if (i == 0) d * j
+ else if (j == 0) d * i
+ else 0
+ }
+ for (i <- 1 to x.length) {
+ for (j <- 1 to y.length) {
+ val mtch = score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))
+ val delete = score(i - 1)(j) + d
+ val insert = score(i)(j - 1) + d
+ score(i)(j) = mtch max insert max delete
+ }
+ }
+
+ var alignment = List.empty[Patch]
+ var i = x.length
+ var j = y.length
+ while (i > 0 || j > 0) {
+ if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) {
+ val newHead =
+ if (x(i - 1) == y(j - 1)) Unmodified(x(i - 1))
+ else Modified(x(i - 1), y(j - 1))
+ alignment = newHead :: alignment
+ i = i - 1
+ j = j - 1
+ } else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) {
+ alignment = Deleted(x(i - 1)) :: alignment
+ i = i - 1
+ } else {
+ alignment = Inserted(y(j - 1)) :: alignment
+ j = j - 1
+ }
+ }
+ builder ++= alignment
+ }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/util/DotClass.scala b/compiler/src/dotty/tools/dotc/util/DotClass.scala
new file mode 100644
index 000000000..cdb697a45
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/DotClass.scala
@@ -0,0 +1,12 @@
+package dotty.tools.dotc.util
+
+/** Common superclass adding standard functionality to compiler classes.
+ *  For now: just the `unsupported` helper.
+ */
+class DotClass {
+
+  /** Fail with an `UnsupportedOperationException` that names this class
+   *  and the unsupported method.
+   */
+  def unsupported(methodName: String): Nothing = {
+    val what = s"$getClass.$methodName"
+    throw new UnsupportedOperationException(what)
+  }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala b/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala
new file mode 100644
index 000000000..521947895
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala
@@ -0,0 +1,33 @@
+package dotty.tools
+package dotc
+package util
+
+import scala.collection.mutable
+
+trait FreshNameCreator {
+  /** Return a name, formed from `prefix`, that has never been returned
+   *  by a previous call on this creator.
+   */
+  def newName(prefix: String = ""): String
+
+  // Legacy overloads kept for source compatibility; the position argument is ignored.
+  @deprecated("use newName(prefix)", "2.9.0")
+  def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
+  @deprecated("use newName()", "2.9.0")
+  def newName(pos: scala.reflect.internal.util.Position): String = newName()
+}
+
+object FreshNameCreator {
+  /** Default implementation: one independent counter per (sanitized) prefix. */
+  class Default extends FreshNameCreator {
+    protected var counter = 0
+    protected val counters = mutable.AnyRefMap[String, Int]() withDefaultValue 0
+
+    /**
+     * Create a fresh name with the given prefix. It is guaranteed
+     * that the returned name has never been returned by a previous
+     * call to this function (provided the prefix does not end in a digit).
+     */
+    def newName(prefix: String): String = {
+      // `<` and `>` are not valid in JVM identifiers, so map them to `$`.
+      val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
+      val serial = counters(safePrefix) + 1
+      counters(safePrefix) = serial
+      if (prefix.isEmpty) "$" + serial + "$" else safePrefix + serial
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala
new file mode 100644
index 000000000..ff470ef5d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala
@@ -0,0 +1,146 @@
+package dotty.tools.dotc.util
+
+/** A hash set that allows some privileged protected access to its internals.
+ *
+ *  Implementation: open addressing with linear probing over a flat array;
+ *  `null` marks an empty slot, so `null` itself cannot be an element.
+ */
+class HashSet[T >: Null <: AnyRef](initialCapacity: Int, loadFactor: Float = 0.25f) extends Set[T] {
+  private var used: Int = _   // number of entries currently stored
+  private var limit: Int = _  // grow when `used` exceeds this
+  private var table: Array[AnyRef] = _
+
+  clear()
+
+  /** The number of elements in the set */
+  def size: Int = used
+
+  private def allocate(size: Int) = {
+    table = new Array[AnyRef](size)
+    limit = (size * loadFactor).toInt
+  }
+
+  /** Remove all elements from this set and set back to initial configuration */
+  def clear(): Unit = {
+    used = 0
+    allocate(initialCapacity)
+  }
+
+  /** Turn hashcode `x` into a table index */
+  private def index(x: Int): Int = math.abs(x % table.length)
+
+  /** Hashcode, can be overridden */
+  def hash(x: T): Int = x.hashCode
+
+  /** Find entry such that `x equals entry`. If it exists, return it.
+   *  If not, enter `x` in set and return `x`.
+   */
+  def findEntryOrUpdate(x: T): T = {
+    var h = index(hash(x))
+    var entry = table(h)
+    while (entry ne null) {
+      if (x equals entry) return entry.asInstanceOf[T]
+      h = index(h + 1)   // linear probing: try the next slot
+      entry = table(h)
+    }
+    addEntryAt(h, x)
+  }
+
+  /** Add entry `x` at index `idx` */
+  private def addEntryAt(idx: Int, x: T) = {
+    table(idx) = x
+    used += 1
+    if (used > limit) growTable()
+    x
+  }
+
+  /** The entry in the set such that `x equals entry`, or else `null`. */
+  def findEntry(x: T): T = {
+    var h = index(hash(x))
+    var entry = table(h)
+    while ((entry ne null) && !(x equals entry)) {
+      h = index(h + 1)
+      entry = table(h)
+    }
+    entry.asInstanceOf[T]
+  }
+
+  // Cursor shared by the privileged `*ByHash` scanning operations below.
+  private var rover: Int = -1
+
+  /** Add entry `x` to set */
+  def addEntry(x: T): Unit = {
+    var h = index(hash(x))
+    var entry = table(h)
+    while (entry ne null) {
+      if (x equals entry) return
+      h = index(h + 1)
+      entry = table(h)
+    }
+    table(h) = x
+    used += 1
+    // NOTE(review): this grows at a fixed 1/4 fill (`table.length >> 2`)
+    // rather than at `limit`, so a non-default `loadFactor` is ignored on
+    // this path — confirm whether the divergence from `addEntryAt` is intended.
+    if (used > (table.length >> 2)) growTable()
+  }
+
+  /** Add all entries in `xs` to set */
+  def addEntries(xs: TraversableOnce[T]): Unit = {
+    xs foreach addEntry
+  }
+
+  /** The iterator of all elements in the set */
+  def iterator = new Iterator[T] {
+    private var i = 0
+    def hasNext: Boolean = {
+      // Skip over empty slots.
+      while (i < table.length && (table(i) eq null)) i += 1
+      i < table.length
+    }
+    def next(): T =
+      if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] }
+      else null
+  }
+
+  /** Privileged access: Find first entry with given hashcode */
+  protected def findEntryByHash(hashCode: Int): T = {
+    rover = index(hashCode)
+    nextEntryByHash(hashCode)
+  }
+
+  /** Privileged access: Find next entry with given hashcode. Needs to immediately
+   *  follow a `findEntryByHash` or `nextEntryByHash` operation.
+   */
+  protected def nextEntryByHash(hashCode: Int): T = {
+    var entry = table(rover)
+    while (entry ne null) {
+      rover = index(rover + 1)
+      if (hash(entry.asInstanceOf[T]) == hashCode) return entry.asInstanceOf[T]
+      entry = table(rover)
+    }
+    null
+  }
+
+  /** Privileged access: Add entry `x` at the last position where an unsuccessful
+   *  `findEntryByHash` or `nextEntryByHash` operation returned. Needs to immediately
+   *  follow a `findEntryByHash` or `nextEntryByHash` operation that was unsuccessful,
+   *  i.e. that returned `null`.
+   */
+  protected def addEntryAfterScan(x: T): T = addEntryAt(rover, x)
+
+  // Re-insertion used while growing: `x` is known absent, so no equality checks.
+  private def addOldEntry(x: T): Unit = {
+    var h = index(hash(x))
+    var entry = table(h)
+    while (entry ne null) {
+      h = index(h + 1)
+      entry = table(h)
+    }
+    table(h) = x
+  }
+
+  // Double the table and rehash every entry into the new array.
+  private def growTable(): Unit = {
+    val oldtable = table
+    allocate(table.length * 2)
+    var i = 0
+    while (i < oldtable.length) {
+      val entry = oldtable(i)
+      if (entry ne null) addOldEntry(entry.asInstanceOf[T])
+      i += 1
+    }
+  }
+
+  override def toString() = "HashSet(%d / %d)".format(used, table.length)
+}
diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
new file mode 100644
index 000000000..5f53e81c4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
@@ -0,0 +1,100 @@
+package dotty.tools.dotc.util
+
+import reflect.ClassTag
+import annotation.tailrec
+
+/** A least-recently-used cache for Key -> Value computations.
+ *  It currently keeps the last `Retained` (= 16) associations; this can be
+ *  changed to anywhere between 2 and 16 by changing `LRUCache.Retained`.
+ *
+ *  Implementation: We keep a ring of `Retained` places, linked
+ *  with the `next` data structure. The ring models a priority queue.
+ *  `last` points to the last element of the queue, and
+ *  `next(last)` to the first one. Lookups compare keys
+ *  sequentially from first to last. Elements with successful lookup
+ *  get promoted to be first in the queue. Elements are evicted
+ *  at the `last` position.
+ *
+ *  Note: keys are compared by reference (`eq`), not by `==`.
+ */
+class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] {
+  import LRUCache._
+  // Slot i holds the association keys(i) -> values(i).
+  val keys = new Array[Key](Retained)
+  val values = new Array[Value](Retained)
+  // Ring of successor links, one 4-bit nibble per slot.
+  var next = new SixteenNibbles(initialRing.bits)
+  var last = Retained - 1 // value is arbitrary
+  var lastButOne: Int = last - 1
+
+  /** The first (most recently used) slot of the ring. */
+  def first = next(last)
+
+  /** Lookup key, returning value or `null` for not found.
+   *  As a side effect, sets `lastButOne` to the element before `last`
+   *  if key was not found.
+   */
+  def lookup(key: Key): Value = {
+    @tailrec
+    def lookupNext(prev: Int, current: Int, nx: SixteenNibbles): Value = {
+      val follow = nx(current)
+      if (keys(current) eq key) {
+        // arrange so that found element is at position `first`.
+        if (current == last) last = prev
+        else if (prev != last) {
+          next = next.updated(prev, follow)
+          next = next.updated(current, first)
+          next = next.updated(last, current)
+        }
+        values(current)
+      } else if (current == last) {
+        // Reached the end without finding the key.
+        lastButOne = prev
+        null
+      } else
+        lookupNext(current, follow, nx)
+    }
+    lookupNext(last, first, next)
+  }
+
+  /** Enter key/value in cache at position `last`.
+   *  As a side effect, sets `last` to `lastButOne`.
+   *  If `lastButOne` was set by a preceding unsuccessful `lookup`
+   *  for the same key, this means that the new element is now the
+   *  first in the queue. If there was no preceding lookup, the element
+   *  is inserted at a random position in the queue.
+   */
+  def enter(key: Key, value: Value): Unit = {
+    keys(last) = key
+    values(last) = value
+    last = lastButOne
+  }
+
+  /** Invalidate key. The invalidated element becomes
+   *  the last in the queue.
+   */
+  def invalidate(key: Key): Unit =
+    if (lookup(key) != null) {
+      // The successful lookup promoted the key to `first`; evict it there.
+      keys(first) = null
+      last = first
+    }
+
+  /** Slot indices in priority order, starting at `first` (infinite iterator). */
+  def indices: Iterator[Int] = Iterator.iterate(first)(next.apply)
+
+  /** The currently occupied keys, in priority order. */
+  def keysIterator: Iterator[Key] =
+    indices take Retained map keys filter (_ != null)
+
+  override def toString = {
+    val assocs = keysIterator
+      .toList // double reverse so that lookups do not perturb order
+      .reverse
+      .map(key => s"$key -> ${lookup(key)}")
+      .reverse
+    s"LRUCache(${assocs.mkString(", ")})"
+  }
+}
+
+object LRUCache {
+
+  /** The number of retained elements in the cache; must be at most 16,
+   *  since each ring link is stored in one 4-bit nibble.
+   */
+  val Retained = 16
+
+  /** The initial ring: 0 -> 1 -> ... -> (Retained - 1) -> 0 */
+  val initialRing =
+    (new SixteenNibbles(0L) /: (0 until Retained))((nibbles, idx) =>
+      nibbles.updated(idx, (idx + 1) % Retained))
+}
diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
new file mode 100644
index 000000000..330d513fe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
@@ -0,0 +1,163 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package dotty.tools.dotc
+package util
+
+import core.Names._
+import core.Decorators._
+
+/** Provides functions to encode and decode Scala symbolic names.
+ *  Also provides some constants.
+ */
+object NameTransformer {
+  // XXX Short term: providing a way to alter these without having to recompile
+  // the compiler before recompiling the compiler.
+  val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
+  val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
+  val MODULE_INSTANCE_NAME = "MODULE$"
+
+  private val nops = 128        // covers every ASCII char that can be an operator
+  private val ncodes = 26 * 26  // one bucket per (2nd, 3rd) letter pair of a "$xy..." code
+
+  // Singly-linked list of operator codes sharing the same two-letter bucket.
+  private class OpCodes(val op: Char, val code: String, val next: OpCodes)
+
+  private val op2code = new Array[String](nops)    // operator char -> "$name" encoding
+  private val code2op = new Array[OpCodes](ncodes) // two-letter bucket -> candidate codes
+  private def enterOp(op: Char, code: String) = {
+    op2code(op) = code
+    // Bucket index derived from the code's 2nd and 3rd characters
+    // (e.g. "ti" for "$tilde"); collisions are chained via OpCodes.next.
+    val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a'
+    code2op(c) = new OpCodes(op, code, code2op(c))
+  }
+
+  /* Note: decoding assumes opcodes are only ever lowercase. */
+  enterOp('~', "$tilde")
+  enterOp('=', "$eq")
+  enterOp('<', "$less")
+  enterOp('>', "$greater")
+  enterOp('!', "$bang")
+  enterOp('#', "$hash")
+  enterOp('%', "$percent")
+  enterOp('^', "$up")
+  enterOp('&', "$amp")
+  enterOp('|', "$bar")
+  enterOp('*', "$times")
+  enterOp('/', "$div")
+  enterOp('+', "$plus")
+  enterOp('-', "$minus")
+  enterOp(':', "$colon")
+  enterOp('\\', "$bslash")
+  enterOp('?', "$qmark")
+  enterOp('@', "$at")
+
+  /** Replace operator symbols by corresponding `\$opname`.
+   *
+   *  @param name the string to encode
+   *  @return the string with all recognized opchars replaced with their encoding
+   */
+  def encode[N <: Name](name: N): N = {
+    // `buf` stays null while no character needed encoding; it is allocated
+    // lazily on the first replacement so unchanged names allocate nothing.
+    var buf: StringBuilder = null
+    val len = name.length
+    var i = 0
+    while (i < len) {
+      val c = name(i)
+      if (c < nops && (op2code(c) ne null)) {
+        if (buf eq null) {
+          buf = new StringBuilder()
+          buf.append(name.slice(0, i))
+        }
+        buf.append(op2code(c))
+        /* Handle glyphs that are not valid Java/JVM identifiers */
+      }
+      else if (!Character.isJavaIdentifierPart(c)) {
+        if (buf eq null) {
+          buf = new StringBuilder()
+          buf.append(name.slice(0, i))
+        }
+        buf.append("$u%04X".format(c.toInt))
+      }
+      else if (buf ne null) {
+        buf.append(c)
+      }
+      i += 1
+    }
+    // Preserve term-ness / type-ness of the input name.
+    if (buf eq null) name
+    else if (name.isTermName) buf.toString.toTermName.asInstanceOf[N]
+    else buf.toString.toTypeName.asInstanceOf[N]
+  }
+
+  /** Replace `\$opname` by corresponding operator symbol.
+   *
+   *  @param name0 the string to decode
+   *  @return the string with all recognized operator symbol encodings replaced with their name
+   */
+  def decode(name0: String): String = {
+    //System.out.println("decode: " + name);//DEBUG
+    // Constructor names are pickled as "<init>"; print them as "this".
+    val name = if (name0.endsWith("<init>")) name0.substring(0, name0.length() - ("<init>").length()) + "this"
+               else name0
+    var buf: StringBuilder = null
+    val len = name.length()
+    var i = 0
+    while (i < len) {
+      var ops: OpCodes = null
+      var unicode = false
+      val c = name charAt i
+      if (c == '$' && i + 2 < len) {
+        val ch1 = name.charAt(i + 1)
+        if ('a' <= ch1 && ch1 <= 'z') {
+          val ch2 = name.charAt(i + 2)
+          if ('a' <= ch2 && ch2 <= 'z') {
+            // Probe the two-letter bucket, then verify the full code matches here.
+            ops = code2op((ch1 - 'a') * 26 + ch2 - 'a')
+            while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next
+            if (ops ne null) {
+              if (buf eq null) {
+                buf = new StringBuilder()
+                buf.append(name.substring(0, i))
+              }
+              buf.append(ops.op)
+              i += ops.code.length()
+            }
+            /* Handle the decoding of Unicode glyphs that are
+             * not valid Java/JVM identifiers */
+          } else if ((len - i) >= 6 && // Check that there are enough characters left
+                     ch1 == 'u' &&
+                     ((Character.isDigit(ch2)) ||
+                     ('A' <= ch2 && ch2 <= 'F'))) {
+            /* Skip past "$u", next four should be hexadecimal */
+            val hex = name.substring(i + 2, i + 6)
+            try {
+              val str = Integer.parseInt(hex, 16).toChar
+              if (buf eq null) {
+                buf = new StringBuilder()
+                buf.append(name.substring(0, i))
+              }
+              buf.append(str)
+              /* 2 for "$u", 4 for hexadecimal number */
+              i += 6
+              unicode = true
+            } catch {
+              case _:NumberFormatException =>
+                /* `hex` did not decode to a hexadecimal number, so
+                 * do nothing. */
+            }
+          }
+        }
+      }
+      /* If we didn't see an opcode or encoded Unicode glyph, and the
+         buffer is non-empty, write the current character and advance
+         one */
+      if ((ops eq null) && !unicode) {
+        if (buf ne null)
+          buf.append(c)
+        i += 1
+      }
+    }
+    //System.out.println("= " + (if (buf == null) name else buf.toString()));//DEBUG
+    if (buf eq null) name else buf.toString()
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/Positions.scala b/compiler/src/dotty/tools/dotc/util/Positions.scala
new file mode 100644
index 000000000..c3890cc9a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Positions.scala
@@ -0,0 +1,173 @@
+package dotty.tools.dotc
+package util
+import language.implicitConversions
+
+/** Position format in little endian:
+ *  Start: unsigned 26 Bits (works for source files up to 64M)
+ *  End: unsigned 26 Bits
+ *  Point: unsigned 12 Bits relative to start
+ *  NoPosition is encoded as the normally invalid range with
+ *  start = 1 and end = 0.
+ */
+object Positions {
+
+  private val StartEndBits = 26
+  val StartEndMask: Long = (1L << StartEndBits) - 1
+  // All 12 point-delta bits set (4095); this value marks synthetic positions.
+  private val SyntheticPointDelta = (1 << (64 - StartEndBits * 2)) - 1
+
+  /** The maximal representable offset in a position */
+  val MaxOffset = StartEndMask
+
+  /** Convert offset `x` to an integer by sign extending the original
+   *  field of `StartEndBits` width.
+   */
+  def offsetToInt(x: Int) =
+    x << (32 - StartEndBits) >> (32 - StartEndBits)
+
+  /** A position indicates a range between a start offset and an end offset.
+   *  Positions can be synthetic or source-derived. A source-derived position
+   *  has in addition a point that lies somewhere between start and end. The point
+   *  is roughly where the ^ would go if an error was diagnosed at that position.
+   *  All quantities are encoded opaquely in a Long.
+   */
+  class Position(val coords: Long) extends AnyVal {
+
+    /** Is this position different from NoPosition? */
+    def exists = this != NoPosition
+
+    /** The start of this position. */
+    def start: Int = {
+      assert(exists)
+      (coords & StartEndMask).toInt
+    }
+
+    /** The end of this position */
+    def end: Int = {
+      assert(exists)
+      ((coords >>> StartEndBits) & StartEndMask).toInt
+    }
+
+    /** The point of this position, returns start for synthetic positions */
+    def point: Int = {
+      assert(exists)
+      val poff = pointDelta
+      if (poff == SyntheticPointDelta) start else start + poff
+    }
+
+    /** The difference between point and start in this position */
+    def pointDelta =
+      (coords >>> (StartEndBits * 2)).toInt
+
+    /** This position if it exists, otherwise `that`. */
+    def orElse(that: Position) =
+      if (this.exists) this else that
+
+    /** The union of two positions. This is the least range that encloses
+     *  both positions. It is always a synthetic position.
+     */
+    def union(that: Position) =
+      if (!this.exists) that
+      else if (!that.exists) this
+      else Position(this.start min that.start, this.end max that.end, this.point)
+
+    /** Does the range of this position contain the one of that position? */
+    def contains(that: Position): Boolean =
+      !that.exists || exists && (start <= that.start && end >= that.end)
+
+    /** Is this position synthetic? */
+    def isSynthetic = pointDelta == SyntheticPointDelta
+
+    /** Is this position source-derived? */
+    def isSourceDerived = !isSynthetic
+
+    /** A position where all components are shifted by a given `offset`
+     *  relative to this position.
+     */
+    def shift(offset: Int) =
+      if (exists) fromOffsets(start + offset, end + offset, pointDelta)
+      else this
+
+    /** The zero-extent position with start and end at the point of this position */
+    def focus = if (exists) Position(point) else NoPosition
+
+    /** The zero-extent position with start and end at the start of this position */
+    def startPos = if (exists) Position(start) else NoPosition
+
+    /** The zero-extent position with start and end at the end of this position */
+    def endPos = if (exists) Position(end) else NoPosition
+
+    /** A copy of this position with a different start */
+    def withStart(start: Int) =
+      fromOffsets(start, this.end, if (isSynthetic) SyntheticPointDelta else this.point - start)
+
+    /** A copy of this position with a different end */
+    def withEnd(end: Int) = fromOffsets(this.start, end, pointDelta)
+
+    /** A copy of this position with a different point */
+    def withPoint(point: Int) = fromOffsets(this.start, this.end, point - this.start)
+
+    /** A synthetic copy of this position */
+    def toSynthetic = if (isSynthetic) this else Position(start, end)
+
+    override def toString = {
+      // Synthetic positions print with <...>, source-derived ones with [...].
+      val (left, right) = if (isSynthetic) ("<", ">") else ("[", "]")
+      if (exists)
+        s"$left$start..${if (point == start) "" else s"$point.."}$end$right"
+      else
+        s"${left}no position${right}"
+    }
+  }
+
+  // Pack (start, end, pointDelta) into the Long layout documented above.
+  private def fromOffsets(start: Int, end: Int, pointDelta: Int) = {
+    //assert(start <= end || start == 1 && end == 0, s"$start..$end")
+    new Position(
+      (start & StartEndMask).toLong |
+      ((end & StartEndMask).toLong << StartEndBits) |
+      (pointDelta.toLong << (StartEndBits * 2)))
+  }
+
+  /** A synthetic position with given start and end */
+  def Position(start: Int, end: Int): Position = {
+    val pos = fromOffsets(start, end, SyntheticPointDelta)
+    assert(pos.isSynthetic)
+    pos
+  }
+
+  /** A source-derived position with given start, end, and point delta */
+  def Position(start: Int, end: Int, point: Int): Position = {
+    // Point deltas that cannot be represented in 12 bits collapse to 0
+    // (i.e. point == start).
+    val pointDelta = (point - start) max 0
+    val pos = fromOffsets(start, end, if (pointDelta >= SyntheticPointDelta) 0 else pointDelta)
+    assert(pos.isSourceDerived)
+    pos
+  }
+
+  /** A synthetic zero-extent position that starts and ends at given `start`. */
+  def Position(start: Int): Position = Position(start, start)
+
+  /** A sentinel for a non-existing position */
+  val NoPosition = Position(1, 0)
+
+  /** The coordinate of a symbol. This is either an index or
+   *  a zero-range position.
+   */
+  class Coord(val encoding: Int) extends AnyVal {
+    // Positive encodings are indices (shifted by 1); others are positions.
+    def isIndex = encoding > 0
+    def isPosition = encoding <= 0
+    def toIndex: Int = {
+      assert(isIndex)
+      encoding - 1
+    }
+    def toPosition = {
+      assert(isPosition)
+      if (this == NoCoord) NoPosition else Position(-1 - encoding)
+    }
+  }
+
+  /** An index coordinate */
+  implicit def indexCoord(n: Int): Coord = new Coord(n + 1)
+  implicit def positionCoord(pos: Position): Coord =
+    if (pos.exists) new Coord(-(pos.point + 1))
+    else NoCoord
+
+  /** A sentinel for a missing coordinate */
+  val NoCoord = new Coord(0)
+}
diff --git a/compiler/src/dotty/tools/dotc/util/Property.scala b/compiler/src/dotty/tools/dotc/util/Property.scala
new file mode 100644
index 000000000..608fc88e6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Property.scala
@@ -0,0 +1,10 @@
+package dotty.tools.dotc.util
+
+/** Defines a key type with which to tag properties, such as attachments
+ *  or context properties
+ */
+object Property {
+
+  /** The class of keys for properties of type V.
+   *  Keys use default (identity) equality, so each `new Key` is distinct.
+   */
+  class Key[+V]
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/util/Set.scala b/compiler/src/dotty/tools/dotc/util/Set.scala
new file mode 100644
index 000000000..3e906c6a8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Set.scala
@@ -0,0 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools.dotc.util
+
+/** A common interface for lightweight mutable sets. */
+abstract class Set[T >: Null] {
+
+  /** The entry equal to `x`, or `null` if the set has none. */
+  def findEntry(x: T): T
+
+  /** Add `x` to this set. */
+  def addEntry(x: T): Unit
+
+  /** An iterator over all entries. */
+  def iterator: Iterator[T]
+
+  /** Apply `f` to every entry. */
+  def foreach[U](f: T => U): Unit = {
+    val it = iterator
+    while (it.hasNext) f(it.next())
+  }
+
+  /** Alias for `contains`, so a set can be used as a predicate. */
+  def apply(x: T): Boolean = contains(x)
+
+  /** Is there an entry equal to `x` in this set? */
+  def contains(x: T): Boolean = findEntry(x) != null
+
+  /** All entries, as a list. */
+  def toList = iterator.toList
+
+  /** Remove all entries from this set. */
+  def clear: Unit
+}
diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
new file mode 100644
index 000000000..477449074
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
@@ -0,0 +1,287 @@
+package dotty.tools.dotc
+package util
+
+import java.io.{File, FileInputStream, PrintStream}
+import java.lang.Long.toHexString
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+import scala.reflect.internal.Flags
+import scala.reflect.internal.pickling.PickleFormat
+import core.unpickleScala2.PickleBuffer
+import core.Names._
+
+/** Debugging aid that dumps the contents of a Scala 2 pickle
+ *  (serialized signature information) in readable form.
+ */
+object ShowPickled {
+  import PickleFormat._
+
+  /** One entry of a pickle: its ordinal `num`, byte offset `startIndex`,
+   *  format `tag` and raw payload `bytes`.
+   */
+  case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) {
+    def isName = tag == TERMname || tag == TYPEname
+    // Tags whose payload starts with a reference to a name entry.
+    def hasName = tag match {
+      case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym | EXTref | EXTMODCLASSref => true
+      case _ => false
+    }
+    def readName =
+      if (isName) new String(bytes, "UTF-8")
+      else sys.error("%s is no name" format tagName)
+    def nameIndex =
+      if (hasName) readNat(bytes, 0)
+      else sys.error("%s has no name" format tagName)
+
+    def tagName = tag2string(tag)
+    override def toString = "%d,%d: %s".format(num, startIndex, tagName)
+  }
+
+  case class PickleBufferEntryList(entries: IndexedSeq[PickleBufferEntry]) {
+    /** Best-effort name for entry `idx`: direct name, referenced name, or "?". */
+    def nameAt(idx: Int) = {
+      val entry = entries(idx)
+      if (entry.isName) entry.readName
+      else if (entry.hasName) entries(entry.nameIndex).readName
+      else "?"
+    }
+  }
+
+  /** Wrap the entries of `buf` (offsets taken from `index`) in a PickleBufferEntryList. */
+  def makeEntryList(buf: PickleBuffer, index: Array[Int]) = {
+    val entries = buf.toIndexedSeq.zipWithIndex map {
+      case ((tag, data), num) => PickleBufferEntry(num, index(num), tag, data)
+    }
+
+    PickleBufferEntryList(entries)
+  }
+
+  /** Human-readable name for a pickle format tag. */
+  def tag2string(tag: Int): String = tag match {
+    case TERMname => "TERMname"
+    case TYPEname => "TYPEname"
+    case NONEsym => "NONEsym"
+    case TYPEsym => "TYPEsym"
+    case ALIASsym => "ALIASsym"
+    case CLASSsym => "CLASSsym"
+    case MODULEsym => "MODULEsym"
+    case VALsym => "VALsym"
+    case EXTref => "EXTref"
+    case EXTMODCLASSref => "EXTMODCLASSref"
+    case NOtpe => "NOtpe"
+    case NOPREFIXtpe => "NOPREFIXtpe"
+    case THIStpe => "THIStpe"
+    case SINGLEtpe => "SINGLEtpe"
+    case CONSTANTtpe => "CONSTANTtpe"
+    case TYPEREFtpe => "TYPEREFtpe"
+    case TYPEBOUNDStpe => "TYPEBOUNDStpe"
+    case REFINEDtpe => "REFINEDtpe"
+    case CLASSINFOtpe => "CLASSINFOtpe"
+    case METHODtpe => "METHODtpe"
+    case POLYtpe => "POLYtpe"
+    case IMPLICITMETHODtpe => "METHODtpe" // IMPLICITMETHODtpe no longer used.
+    case SUPERtpe => "SUPERtpe"
+    case LITERALunit => "LITERALunit"
+    case LITERALboolean => "LITERALboolean"
+    case LITERALbyte => "LITERALbyte"
+    case LITERALshort => "LITERALshort"
+    case LITERALchar => "LITERALchar"
+    case LITERALint => "LITERALint"
+    case LITERALlong => "LITERALlong"
+    case LITERALfloat => "LITERALfloat"
+    case LITERALdouble => "LITERALdouble"
+    case LITERALstring => "LITERALstring"
+    case LITERALnull => "LITERALnull"
+    case LITERALclass => "LITERALclass"
+    case LITERALenum => "LITERALenum"
+    case SYMANNOT => "SYMANNOT"
+    case CHILDREN => "CHILDREN"
+    case ANNOTATEDtpe => "ANNOTATEDtpe"
+    case ANNOTINFO => "ANNOTINFO"
+    case ANNOTARGARRAY => "ANNOTARGARRAY"
+    // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
+    case EXISTENTIALtpe => "EXISTENTIALtpe"
+    case TREE => "TREE"
+    case MODIFIERS => "MODIFIERS"
+
+    case _ => "***BAD TAG***(" + tag + ")"
+  }
+
+  /** Extremely regrettably, essentially copied from PickleBuffer.
+   *  Reads a variable-length natural number: 7 bits per byte,
+   *  high bit set on all but the last byte.
+   */
+  def readNat(data: Array[Byte], index: Int): Int = {
+    var idx = index
+    var result = 0L
+    var b = 0L
+    do {
+      b = data(idx)
+      idx += 1
+      result = (result << 7) + (b & 0x7f)
+    } while((b & 0x80) != 0L)
+
+    result.toInt
+  }
+
+  /** Print a human-readable dump of pickle `buf` to `out`. */
+  def printFile(buf: PickleBuffer, out: PrintStream = System.out): Unit = {
+    out.println("Version " + buf.readNat() + "." + buf.readNat())
+    val index = buf.createIndex
+    val entryList = makeEntryList(buf, index)
+    buf.readIndex = 0
+
+    def p(s: String) = out print s
+
+    def printNameRef(): Unit = {
+      val idx = buf.readNat()
+      val name = entryList nameAt idx
+      val toPrint = " %s(%s)".format(idx, name)
+
+      out print toPrint
+    }
+
+    def printNat() = p(" " + buf.readNat())
+    def printReadNat(x: Int) = p(" " + x)
+
+    // All references are plain nats (entry indices); the aliases below
+    // exist only to make the printEntry cases read like the format spec.
+    def printSymbolRef() = printNat()
+    def printTypeRef() = printNat()
+    def printConstantRef() = printNat()
+    def printAnnotInfoRef() = printNat()
+    def printConstAnnotArgRef() = printNat()
+    def printAnnotArgRef() = printNat()
+
+    def printSymInfo(end: Int, isType: Boolean): Unit = {
+      printNameRef()
+      printSymbolRef()
+      val pflags = buf.readLongNat()
+      def printFlags(privateWithin: Option[Int]) = {
+        val accessBoundary = (
+          for (idx <- privateWithin) yield {
+            val s = entryList nameAt idx
+            idx + "(" + s + ")"
+          }
+        )
+        val flagString = PickleBuffer.unpickleScalaFlags(pflags, isType).toString
+        out.print(" %s[%s]".format(toHexString(pflags), flagString))
+      }
+
+      /** Might be info or privateWithin */
+      val x = buf.readNat()
+      if (buf.readIndex == end) {
+        printFlags(None)
+        printReadNat(x)
+      }
+      else {
+        printFlags(Some(x))
+        printTypeRef()
+      }
+    }
+
+    /** Note: the entries which require some semantic analysis to be correctly
+     *  interpreted are for the most part going to tell you the wrong thing.
+     *  It's not so easy to duplicate the logic applied in the UnPickler.
+     */
+    def printEntry(i: Int): Unit = {
+      buf.readIndex = index(i)
+      p(i + "," + buf.readIndex + ": ")
+      val tag = buf.readByte()
+      out.print(tag2string(tag))
+      val len = buf.readNat()
+      val end = len + buf.readIndex
+      p(" " + len + ":")
+      tag match {
+        case TERMname =>
+          out.print(" ")
+          out.print(termName(buf.bytes, buf.readIndex, len).toString)
+          buf.readIndex = end
+        case TYPEname =>
+          out.print(" ")
+          out.print(typeName(buf.bytes, buf.readIndex, len))
+          buf.readIndex = end
+        case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym =>
+          printSymInfo(end, tag == TYPEsym || tag == ALIASsym || tag == CLASSsym)
+          if (tag == CLASSsym && (buf.readIndex < end)) printTypeRef()
+        case EXTref | EXTMODCLASSref =>
+          printNameRef()
+          if (buf.readIndex < end) { printSymbolRef() }
+        case THIStpe =>
+          printSymbolRef()
+        case SINGLEtpe =>
+          printTypeRef(); printSymbolRef()
+        case CONSTANTtpe =>
+          printTypeRef(); printConstantRef()
+        case TYPEREFtpe =>
+          printTypeRef(); printSymbolRef(); buf.until(end, printTypeRef)
+        case TYPEBOUNDStpe =>
+          printTypeRef(); printTypeRef()
+        case REFINEDtpe =>
+          printSymbolRef(); buf.until(end, printTypeRef)
+        case CLASSINFOtpe =>
+          printSymbolRef(); buf.until(end, printTypeRef)
+        case METHODtpe | IMPLICITMETHODtpe =>
+          printTypeRef(); buf.until(end, printTypeRef)
+        case POLYtpe =>
+          printTypeRef(); buf.until(end, printSymbolRef)
+        case LITERALboolean =>
+          out.print(if (buf.readLong(len) == 0L) " false" else " true")
+        case LITERALbyte =>
+          out.print(" " + buf.readLong(len).toByte)
+        case LITERALshort =>
+          out.print(" " + buf.readLong(len).toShort)
+        case LITERALchar =>
+          out.print(" " + buf.readLong(len).toChar)
+        case LITERALint =>
+          out.print(" " + buf.readLong(len).toInt)
+        case LITERALlong =>
+          out.print(" " + buf.readLong(len))
+        case LITERALfloat =>
+          out.print(" " + intBitsToFloat(buf.readLong(len).toInt))
+        case LITERALdouble =>
+          out.print(" " + longBitsToDouble(buf.readLong(len)))
+        case LITERALstring =>
+          printNameRef()
+        case LITERALenum =>
+          printSymbolRef()
+        case LITERALnull =>
+          out.print(" <null>")
+        case LITERALclass =>
+          printTypeRef()
+        case CHILDREN =>
+          printSymbolRef(); buf.until(end, printSymbolRef)
+        case SYMANNOT =>
+          printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
+        case ANNOTATEDtpe =>
+          printTypeRef(); buf.until(end, printAnnotInfoRef)
+        case ANNOTINFO =>
+          printTypeRef(); buf.until(end, printAnnotArgRef)
+        case ANNOTARGARRAY =>
+          buf.until(end, printConstAnnotArgRef)
+        case EXISTENTIALtpe =>
+          printTypeRef(); buf.until(end, printSymbolRef)
+
+        case _ =>
+      }
+      out.println()
+      // Sanity check: flag entries whose declared length disagrees with
+      // what was actually consumed.
+      if (buf.readIndex != end) {
+        out.println("BAD ENTRY END: computed = %d, actual = %d, bytes = %s".format(
+          end, buf.readIndex, buf.bytes.slice(index(i), (end max buf.readIndex)).mkString(", ")
+        ))
+      }
+    }
+
+    for (i <- 0 until index.length) printEntry(i)
+  }
+
+/*
+ *
+ def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
+ def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+ def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
+   try Some(new PickleBuffer(data, 0, data.length))
+   catch { case _: Exception => None }
+
+ def show(what: String, pickle: PickleBuffer) = {
+   Console.println(what)
+   val saved = pickle.readIndex
+   pickle.readIndex = 0
+   printFile(pickle, Console.out)
+   pickle.readIndex = saved
+ }
+
+ def main(args: Array[String]) {
+   args foreach { arg =>
+     (fromFile(arg) orElse fromName(arg)) match {
+       case Some(pb) => show(arg + ":", pb)
+       case _ => Console.println("Cannot read " + arg)
+     }
+   }
+ }*/
+}
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleMap.scala
new file mode 100644
index 000000000..b8668d7e4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SimpleMap.scala
@@ -0,0 +1,223 @@
+package dotty.tools.dotc.util
+
+import collection.mutable.ListBuffer
+
+/** A miniature immutable map from reference keys to reference values,
+ *  with dedicated compact representations for sizes 0 to 4 and a flat
+ *  array representation beyond that (see the companion object).
+ *  A `null` result from `apply` means "no binding" — hence `V >: Null`.
+ */
+abstract class SimpleMap[K <: AnyRef, +V >: Null <: AnyRef] extends (K => V) {
+  def size: Int
+  /** The value bound to `k`, or `null` if there is no binding for `k`. */
+  def apply(k: K): V
+  /** This map without a binding for `k`; may return `this` unchanged. */
+  def remove(k: K): SimpleMap[K, V]
+  /** This map with `k` bound to `v`, replacing any previous binding. */
+  def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleMap[K, V1]
+  def contains(k: K): Boolean = apply(k) != null
+  /** Transform all values with `f`; implementations preserve sharing when
+   *  `f` returns its argument unchanged (reference equality). */
+  def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleMap[K, V1]
+  def foreachBinding(f: (K, V) => Unit): Unit
+  /** Apply `f` to every binding and collect the results in a list. */
+  def map2[T](f: (K, V) => T): List[T] = {
+    val buf = new ListBuffer[T]
+    foreachBinding((k, v) => buf += f(k, v))
+    buf.toList
+  }
+  def keys: List[K] = map2((k, v) => k)
+  def toList: List[(K, V)] = map2((k, v) => (k, v))
+  override def toString = {
+    def assocToString(key: K, value: V) = s"$key -> $value"
+    map2(assocToString) mkString ("(", ", ", ")")
+  }
+}
+
+object SimpleMap {
+
+  // Size at which a shrinking MapMore is rebuilt into one of the small,
+  // specialized representations rather than a smaller array copy.
+  private val CompactifyThreshold = 4
+
+  private object myEmpty extends SimpleMap[AnyRef, Null] {
+    def size = 0
+    def apply(k: AnyRef) = null
+    def remove(k: AnyRef) = this
+    def updated[V1 >: Null <: AnyRef](k: AnyRef, v: V1) = new Map1(k, v)
+    def mapValuesNow[V1 >: Null <: AnyRef](f: (AnyRef, V1) => V1) = this
+    def foreachBinding(f: (AnyRef, Null) => Unit) = ()
+  }
+
+  /** The empty map, shared across all key types via an unchecked cast
+   *  (safe because the empty map never produces a key or value). */
+  def Empty[K <: AnyRef] = myEmpty.asInstanceOf[SimpleMap[K, Null]]
+
+  /** A map holding exactly one binding. Keys are compared with `==`. */
+  class Map1[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V) extends SimpleMap[K, V] {
+    def size = 1
+    def apply(k: K) =
+      if (k == k1) v1
+      else null
+    def remove(k: K) =
+      if (k == k1) Empty.asInstanceOf[SimpleMap[K, V]]
+      else this
+    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
+      if (k == k1) new Map1(k, v)
+      else new Map2(k1, v1, k, v)
+    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
+      val w1 = f(k1, v1)
+      // Preserve sharing when the function leaves the value unchanged.
+      if (v1 eq w1) this else new Map1(k1, w1)
+    }
+    def foreachBinding(f: (K, V) => Unit) = f(k1, v1)
+  }
+
+  /** A map holding exactly two bindings. */
+  class Map2[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V, k2: K, v2: V) extends SimpleMap[K, V] {
+    def size = 2
+    def apply(k: K) =
+      if (k == k1) v1
+      else if (k == k2) v2
+      else null
+    def remove(k: K) =
+      if (k == k1) new Map1(k2, v2)
+      else if (k == k2) new Map1(k1, v1)
+      else this
+    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
+      if (k == k1) new Map2(k, v, k2, v2)
+      else if (k == k2) new Map2(k1, v1, k, v)
+      else new Map3(k1, v1, k2, v2, k, v)
+    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
+      val w1 = f(k1, v1); val w2 = f(k2, v2)
+      if ((v1 eq w1) && (v2 eq w2)) this
+      else new Map2(k1, w1, k2, w2)
+    }
+    def foreachBinding(f: (K, V) => Unit) = { f(k1, v1); f(k2, v2) }
+  }
+
+  /** A map holding exactly three bindings. */
+  class Map3[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V) extends SimpleMap[K, V] {
+    def size = 3
+    def apply(k: K) =
+      if (k == k1) v1
+      else if (k == k2) v2
+      else if (k == k3) v3
+      else null
+    def remove(k: K) =
+      if (k == k1) new Map2(k2, v2, k3, v3)
+      else if (k == k2) new Map2(k1, v1, k3, v3)
+      else if (k == k3) new Map2(k1, v1, k2, v2)
+      else this
+    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
+      if (k == k1) new Map3(k, v, k2, v2, k3, v3)
+      else if (k == k2) new Map3(k1, v1, k, v, k3, v3)
+      else if (k == k3) new Map3(k1, v1, k2, v2, k, v)
+      else new Map4(k1, v1, k2, v2, k3, v3, k, v)
+    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
+      val w1 = f(k1, v1); val w2 = f(k2, v2); val w3 = f(k3, v3)
+      if ((v1 eq w1) && (v2 eq w2) && (v3 eq w3)) this
+      else new Map3(k1, w1, k2, w2, k3, w3)
+    }
+    def foreachBinding(f: (K, V) => Unit) = { f(k1, v1); f(k2, v2); f(k3, v3) }
+  }
+
+  /** A map holding exactly four bindings; `updated` with a fresh key
+   *  switches to the array-based `MapMore` representation. */
+  class Map4[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V, k4: K, v4: V) extends SimpleMap[K, V] {
+    def size = 4
+    def apply(k: K) =
+      if (k == k1) v1
+      else if (k == k2) v2
+      else if (k == k3) v3
+      else if (k == k4) v4
+      else null
+    def remove(k: K) =
+      if (k == k1) new Map3(k2, v2, k3, v3, k4, v4)
+      else if (k == k2) new Map3(k1, v1, k3, v3, k4, v4)
+      else if (k == k3) new Map3(k1, v1, k2, v2, k4, v4)
+      else if (k == k4) new Map3(k1, v1, k2, v2, k3, v3)
+      else this
+    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
+      if (k == k1) new Map4(k, v, k2, v2, k3, v3, k4, v4)
+      else if (k == k2) new Map4(k1, v1, k, v, k3, v3, k4, v4)
+      else if (k == k3) new Map4(k1, v1, k2, v2, k, v, k4, v4)
+      else if (k == k4) new Map4(k1, v1, k2, v2, k3, v3, k, v)
+      else new MapMore(Array[AnyRef](k1, v1, k2, v2, k3, v3, k4, v4, k, v))
+    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
+      val w1 = f(k1, v1); val w2 = f(k2, v2); val w3 = f(k3, v3); val w4 = f(k4, v4)
+      if ((v1 eq w1) && (v2 eq w2) && (v3 eq w3) && (v4 eq w4)) this
+      else new Map4(k1, w1, k2, w2, k3, w3, k4, w4)
+    }
+    def foreachBinding(f: (K, V) => Unit) = { f(k1, v1); f(k2, v2); f(k3, v3); f(k4, v4) }
+  }
+
+  /** Representation for maps of more than four bindings: a flat array
+   *  [k1, v1, k2, v2, ...]. NOTE(review): keys are compared by reference
+   *  (`eq`) here, but with `==` in Map1..Map4 — presumably keys are
+   *  interned so the two coincide; confirm.
+   */
+  class MapMore[K <: AnyRef, +V >: Null <: AnyRef](bindings: Array[AnyRef]) extends SimpleMap[K, V] {
+    // Unchecked casts are safe by construction: even slots hold keys,
+    // odd slots hold values.
+    private def key(i: Int): K = bindings(i).asInstanceOf[K]
+    private def value(i: Int): V = bindings(i + 1).asInstanceOf[V]
+
+    def size = bindings.length / 2
+
+    def apply(k: K): V = {
+      var i = 0
+      while (i < bindings.length) {
+        if (bindings(i) eq k) return value(i)
+        i += 2
+      }
+      null
+    }
+
+    def remove(k: K): SimpleMap[K, V] = {
+      var i = 0
+      while (i < bindings.length) {
+        if (bindings(i) eq k) return {
+          if (size == CompactifyThreshold) {
+            // Shrinking below the threshold: rebuild as a small map.
+            var m: SimpleMap[K, V] = Empty[K]
+            for (j <- 0 until bindings.length by 2)
+              if (j != i) m = m.updated(key(j), value(j))
+            m
+          } else {
+            // Copy the array, omitting the two slots of the removed binding.
+            val bindings1 = new Array[AnyRef](bindings.length - 2)
+            Array.copy(bindings, 0, bindings1, 0, i)
+            Array.copy(bindings, i + 2, bindings1, i, bindings1.length - i)
+            new MapMore(bindings1)
+          }
+        }
+        i += 2
+      }
+      this
+    }
+
+    // NOTE(review): the result type is SimpleMap[K, V] although a value of
+    // the wider type V1 may be stored (the abstract signature promises
+    // SimpleMap[K, V1]); this compiles via covariance and erasure — confirm
+    // the narrower declared result is intentional.
+    def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleMap[K, V] = {
+      var i = 0
+      while (i < bindings.length) {
+        if (bindings(i) eq k)
+          return {
+            if (v eq bindings(i + 1)) this
+            else {
+              val bindings1 = bindings.clone
+              bindings1(i + 1) = v
+              new MapMore(bindings1)
+            }
+          }
+        i += 2
+      }
+      // Key not present: append the new binding to a copy of the array.
+      val bindings2 = new Array[AnyRef](bindings.length + 2)
+      Array.copy(bindings, 0, bindings2, 0, bindings.length)
+      bindings2(bindings.length) = k
+      bindings2(bindings.length + 1) = v
+      new MapMore(bindings2)
+    }
+
+    override def contains(k: K): Boolean = {
+      var i = 0
+      while (i < bindings.length) {
+        if (bindings(i) eq k) return true
+        i += 2
+      }
+      false
+    }
+
+    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
+      // Clone lazily: only copy the array once the first value changes.
+      var bindings1: Array[AnyRef] = bindings
+      var i = 0
+      while (i < bindings.length) {
+        val v = value(i)
+        val v1 = f(key(i), v)
+        if ((v1 ne v) && (bindings1 eq bindings))
+          bindings1 = bindings.clone
+        // Self-assignment when no clone has happened yet; harmless.
+        bindings1(i) = bindings(i)
+        bindings1(i + 1) = v1
+        i += 2
+      }
+      if (bindings1 eq bindings) this else new MapMore(bindings1)
+    }
+
+    def foreachBinding(f: (K, V) => Unit) = {
+      var i = 0
+      while (i < bindings.length) {
+        f(key(i), value(i))
+        i += 2
+      }
+    }
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
new file mode 100644
index 000000000..93817604e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
@@ -0,0 +1,28 @@
+package dotty.tools.dotc.util
+
+/** An efficient implementation of sequences of 16 indexed elements with
+ * values 0..15 in a single Long.
+ *
+ */
+class SixteenNibbles(val bits: Long) extends AnyVal {
+  import SixteenNibbles._
+
+  /** The nibble (4-bit value, 0..15) stored at index `idx` (0..15). */
+  def apply(idx: Int): Int =
+    (bits >>> (idx * Width)).toInt & Mask
+
+  /** A copy with the nibble at `idx` replaced by `value`; only the low
+   *  4 bits of `value` are kept. */
+  def updated(idx: Int, value: Int): SixteenNibbles =
+    new SixteenNibbles(
+      (bits & ~(LongMask << (idx * Width))) |
+      ((value & Mask).toLong << (idx * Width)))
+
+  /** All sixteen nibbles, lowest index first. */
+  def elements: IndexedSeq[Int] = (0 until 16) map apply
+
+  override def toString =
+    s"SixteenNibbles(${elements.mkString(", ")})"
+}
+
+object SixteenNibbles {
+  final val Width = 4                // bits per nibble
+  final val Mask = (1 << Width) - 1  // 0xF, selects a single nibble
+  final val LongMask = Mask.toLong
+}
diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
new file mode 100644
index 000000000..1d4c9c2ab
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
@@ -0,0 +1,145 @@
+package dotty.tools
+package dotc
+package util
+
+import scala.collection.mutable.ArrayBuffer
+import dotty.tools.io._
+import annotation.tailrec
+import java.util.regex.Pattern
+import java.io.IOException
+import Chars._
+import ScriptSourceFile._
+import Positions._
+import scala.io.Codec
+
+import java.util.Optional
+
+object ScriptSourceFile {
+  @sharable private val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE)
+  private val headerStarts = List("#!", "::#!")
+
+  /** A SourceFile for `file` with any script header stripped from its
+   *  content. The returned file's `underlying` keeps the full content so
+   *  positions can still be mapped back to the original file.
+   *  @throws IOException if a header is opened but never closed.
+   */
+  def apply(file: AbstractFile, content: Array[Char]) = {
+    /** Length of the script header from the given content, if there is one.
+     *  The header begins with "#!" or "::#!" and ends with a line starting
+     *  with "!#" or "::!#".
+     */
+    val headerLength =
+      if (headerStarts exists (content startsWith _)) {
+        val matcher = headerPattern matcher content.mkString
+        if (matcher.find) matcher.end
+        else throw new IOException("script file does not close its header with !# or ::!#")
+      } else 0
+    new SourceFile(file, content drop headerLength) {
+      override val underlying = new SourceFile(file, content)
+    }
+  }
+}
+
+case class SourceFile(file: AbstractFile, content: Array[Char]) extends interfaces.SourceFile {
+
+  def this(_file: AbstractFile, codec: Codec) = this(_file, new String(_file.toByteArray, codec.charSet).toCharArray)
+  def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
+  def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
+
+  /** Tab increment; can be overridden */
+  def tabInc = 8
+
+  override def name = file.name
+  override def path = file.path
+  override def jfile = Optional.ofNullable(file.file)
+
+  // Equality is by file path plus start offset (the start distinguishes a
+  // header-stripped script source from its underlying full source).
+  override def equals(that : Any) = that match {
+    case that : SourceFile => file.path == that.file.path && start == that.start
+    case _ => false
+  }
+  override def hashCode = file.path.## + start.##
+
+  /** The character at offset `idx`. */
+  def apply(idx: Int) = content.apply(idx)
+
+  val length = content.length
+
+  /** true for all source files except `NoSource` */
+  def exists: Boolean = true
+
+  /** The underlying source file */
+  def underlying: SourceFile = this
+
+  /** The start of this file in the underlying source file */
+  def start = 0
+
+  /** Attach this source to `pos`, or NoSourcePosition if `pos` is absent. */
+  def atPos(pos: Position): SourcePosition =
+    if (pos.exists) SourcePosition(underlying, pos)
+    else NoSourcePosition
+
+  def isSelfContained = underlying eq this
+
+  /** Map a position to a position in the underlying source file.
+   *  For regular source files, simply return the argument.
+   */
+  def positionInUltimateSource(position: SourcePosition): SourcePosition =
+    SourcePosition(underlying, position.pos shift start)
+
+  // True iff the character at `idx` terminates a line. A CR immediately
+  // followed by LF is not counted — the LF will be.
+  private def isLineBreak(idx: Int) =
+    if (idx >= length) false else {
+      val ch = content(idx)
+      // don't identify the CR in CR LF as a line break, since LF will do.
+      if (ch == CR) (idx + 1 == length) || (content(idx + 1) != LF)
+      else isLineBreakChar(ch)
+    }
+
+  // Offsets of the first character of every line, with a trailing sentinel
+  // equal to the content length so lookups near the end stay in bounds.
+  private def calculateLineIndices(cs: Array[Char]) = {
+    val buf = new ArrayBuffer[Int]
+    buf += 0
+    for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
+    buf += cs.length // sentinel, so that findLine below works smoother
+    buf.toArray
+  }
+  private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
+
+  /** Map line to offset of first character in line */
+  def lineToOffset(index: Int): Int = lineIndices(index)
+
+  /** A cache to speed up offsetToLine searches to similar lines.
+   *  NOTE(review): plain var, so offsetToLine is not thread-safe. */
+  private var lastLine = 0
+
+  /** Convert offset to line in this source file.
+   *  Lines are numbered from 0.
+   */
+  def offsetToLine(offset: Int): Int = {
+    lastLine = Util.bestFit(lineIndices, lineIndices.length, offset, lastLine)
+    lastLine
+  }
+
+  /** The index of the first character of the line containing position `offset` */
+  def startOfLine(offset: Int): Int = {
+    require(offset >= 0)
+    lineToOffset(offsetToLine(offset))
+  }
+
+  /** The start index of the line following the one containing position `offset` */
+  def nextLine(offset: Int): Int =
+    lineToOffset(offsetToLine(offset) + 1 min lineIndices.length - 1)
+
+  /** The content of the line containing position `offset` */
+  def lineContent(offset: Int): String =
+    content.slice(startOfLine(offset), nextLine(offset)).mkString
+
+  /** The column corresponding to `offset`, starting at 0 */
+  def column(offset: Int): Int = {
+    var idx = startOfLine(offset)
+    var col = 0
+    while (idx != offset) {
+      // NOTE(review): `(tabInc - col) % tabInc` evaluates to 0 when
+      // col == 0, so a tab in column 0 would not advance the column; the
+      // usual tab-stop formula is `tabInc - col % tabInc` — confirm intent.
+      col += (if (content(idx) == '\t') (tabInc - col) % tabInc else 1)
+      idx += 1
+    }
+    col
+  }
+
+  override def toString = file.toString
+}
+
+/** Sentinel for the absence of a source file; `exists` is false and all
+ *  positions attached to it collapse to NoSourcePosition. */
+@sharable object NoSource extends SourceFile("<no source>", Nil) {
+  override def exists = false
+  override def atPos(pos: Position): SourcePosition = NoSourcePosition
+}
+
diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
new file mode 100644
index 000000000..aad4995d8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
@@ -0,0 +1,57 @@
+package dotty.tools
+package dotc
+package util
+
+import Positions.{Position, NoPosition}
+
+/** A source position is comprised of a position in a source file */
+case class SourcePosition(source: SourceFile, pos: Position, outer: SourcePosition = NoSourcePosition)
+extends interfaces.SourcePosition {
+  def exists = pos.exists
+
+  /** The content of the line containing the point of this position. */
+  def lineContent: String = source.lineContent(point)
+
+  def point: Int = pos.point
+  /** The line of the position, starting at 0 */
+  def line: Int = source.offsetToLine(point)
+
+  /** The lines of the position; falls back to the point's line when the
+   *  start/end range spans no full line. */
+  def lines: List[Int] =
+    List.range(source.offsetToLine(start), source.offsetToLine(end + 1)) match {
+      case Nil => line :: Nil
+      case xs => xs
+    }
+
+  /** Offsets of the first character of each line in `lines`. */
+  def lineOffsets: List[Int] =
+    lines.map(source.lineToOffset(_))
+
+  /** The content of line `lineNumber` (0-based). */
+  def lineContent(lineNumber: Int): String =
+    source.lineContent(source.lineToOffset(lineNumber))
+
+  /** `lineOffsets` split into those at or before the point, and those after. */
+  def beforeAndAfterPoint: (List[Int], List[Int]) =
+    lineOffsets.partition(_ <= point)
+
+  /** The column of the position, starting at 0 */
+  def column: Int = source.column(point)
+
+  def start: Int = pos.start
+  def startLine: Int = source.offsetToLine(start)
+  def startColumn: Int = source.column(start)
+
+  def end: Int = pos.end
+  def endLine: Int = source.offsetToLine(end)
+  def endColumn: Int = source.column(end)
+
+  /** This position with `outer` as its enclosing position. */
+  def withOuter(outer: SourcePosition) = new SourcePosition(source, pos, outer)
+
+  override def toString =
+    if (source.exists) s"${source.file}:${line + 1}"
+    else s"(no source file, offset = ${pos.point})"
+}
+
+/** A sentinel for a non-existing source position */
+@sharable object NoSourcePosition extends SourcePosition(NoSource, NoPosition) {
+  override def toString = "?"
+  // Wrapping the sentinel in an outer position yields just the outer one.
+  override def withOuter(outer: SourcePosition) = outer
+}
+
diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala
new file mode 100644
index 000000000..b7e0996f5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Stats.scala
@@ -0,0 +1,78 @@
+package dotty.tools
+package dotc
+package util
+
+import core.Contexts._
+import collection.mutable
+
+/** Lightweight call-count statistics and stack-snapshot heartbeat for the
+ *  compiler. With `enabled = false` (a compile-time constant) the `record`
+ *  and `track` guards are expected to compile to no-ops.
+ */
+@sharable object Stats {
+
+  // Master switch; flip to true to collect statistics.
+  final val enabled = false
+
+  /** The period in ms in which stack snapshots are displayed */
+  final val HeartBeatPeriod = 250
+
+  var monitored = false
+
+  // Current logical call stack maintained by doTrack; read by HeartBeat.
+  @volatile private var stack: List[String] = Nil
+
+  // Hit counters, defaulting to 0 for unseen keys.
+  // NOTE(review): plain HashMap mutated from tracked code and read by the
+  // heartbeat thread — not synchronized; confirm single-threaded use.
+  val hits = new mutable.HashMap[String, Int] {
+    override def default(key: String): Int = 0
+  }
+
+  /** Count `n` occurrences of event `fn` (no-op unless `enabled`). */
+  @inline
+  def record(fn: String, n: Int = 1) =
+    if (enabled) doRecord(fn, n)
+
+  private def doRecord(fn: String, n: Int) =
+    if (monitored) {
+      // Collapse all "member-*" events into a single "member" bucket.
+      val name = if (fn.startsWith("member-")) "member" else fn
+      hits(name) += n
+    }
+
+  /** Run `op`, counting it under `fn` and pushing `fn` on the snapshot
+   *  stack for the duration (no-op wrapper unless `enabled`). */
+  @inline
+  def track[T](fn: String)(op: => T) =
+    if (enabled) doTrack(fn)(op) else op
+
+  def doTrack[T](fn: String)(op: => T) =
+    if (monitored) {
+      stack = fn :: stack
+      record(fn)
+      try op
+      finally stack = stack.tail
+    } else op
+
+  /** Background thread that prints the tracked stack every HeartBeatPeriod ms. */
+  class HeartBeat extends Thread() {
+    @volatile private[Stats] var continue = true
+
+    // Prints outermost frame first by recursing before printing.
+    private def printStack(stack: List[String]): Unit = stack match {
+      case str :: rest =>
+        printStack(rest)
+        print(s"-> $str ")
+      case Nil =>
+        println()
+        print("|")
+    }
+
+    override final def run(): Unit = {
+      Thread.sleep(HeartBeatPeriod)
+      printStack(stack)
+      // Self tail call acts as the polling loop until `continue` is cleared.
+      if (continue) run()
+    }
+  }
+
+  /** Run `op` with the heartbeat thread active (if -Yheartbeat is set),
+   *  then print the accumulated hit counts sorted by frequency. */
+  def monitorHeartBeat[T](op: => T)(implicit ctx: Context) = {
+    if (ctx.settings.Yheartbeat.value) {
+      var hb = new HeartBeat()
+      hb.start()
+      monitored = true
+      try op
+      finally {
+        hb.continue = false
+        println()
+        println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n")
+        println(s"sizes: ${ctx.base.uniquesSizes}")
+      }
+    } else op
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/Util.scala b/compiler/src/dotty/tools/dotc/util/Util.scala
new file mode 100644
index 000000000..0d37f687b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Util.scala
@@ -0,0 +1,32 @@
+package dotty.tools.dotc.util
+import reflect.ClassTag
+
+object Util {
+
+  /** The index `i` in `candidates.indices` such that `candidates(i) <= x` and
+   *  `candidates(i)` is closest to `x`, determined by binary search, or -1
+   *  if `x < candidates(0)`.
+   *  @param hint If between 0 and `candidates.length` use this
+   *              as the first search point, otherwise use
+   *              `candidates.length/2`.
+   *  @pre candidates is sorted
+   */
+  def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = {
+    // Invariant: the answer lies in [lo, hi]; `mid` is the current probe.
+    def recur(lo: Int, hi: Int, mid: Int): Int =
+      if (x < candidates(mid))
+        recur(lo, mid - 1, (lo + mid - 1) / 2)
+      else if (mid + 1 < length && x >= candidates(mid + 1))
+        recur(mid + 1, hi, (mid + 1 + hi) / 2)
+      else mid
+    val initMid = if (0 <= hint && hint < length) hint else length / 2
+    if (length == 0 || x < candidates(0)) -1
+    else recur(0, length, initMid)
+  }
+
+  /** An array twice the size of given array, with existing elements copied over */
+  def dble[T: ClassTag](arr: Array[T]) = {
+    val arr1 = new Array[T](arr.length * 2)
+    Array.copy(arr, 0, arr1, 0, arr.length)
+    arr1
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/util/common.scala b/compiler/src/dotty/tools/dotc/util/common.scala
new file mode 100644
index 000000000..d9798aec5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/common.scala
@@ -0,0 +1,14 @@
+package dotty.tools.dotc
+package util
+
+import core.Names.Name
+import core.Types.WildcardType
+
+/** Common values hoisted out for performance */
+object common {
+
+  // Eta-expanded constant functions, allocated once instead of per call site.
+  val alwaysTrue = Function.const(true) _
+  val alwaysZero = Function.const(0) _
+  val alwaysWildcardType = Function.const(WildcardType) _
+
+}
diff --git a/compiler/src/dotty/tools/dotc/util/kwords.sc b/compiler/src/dotty/tools/dotc/util/kwords.sc
new file mode 100644
index 000000000..94c17eaf4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/kwords.sc
@@ -0,0 +1,18 @@
+package dotty.tools.dotc.util
+
+import dotty.tools.dotc.parsing._
+import Scanners._
+import Tokens._
+
+// Eclipse/IDE scratch worksheet exercising the parser's keyword tables;
+// the `//>` comments record previously evaluated results.
+object kwords {
+  println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet
+  keywords.toList.map(tokenString) //> res0: List[String] = List(if, for, else, this, null, new, with, super, case,
+  //| case class, case object, val, abstract, final, private, protected, override
+  //| , implicit, var, def, type, extends, true, false, object, class, import, pac
+  //| kage, yield, do, trait, sealed, throw, try, catch, finally, while, return, m
+  //| atch, lazy, then, forSome, _, :, =, <-, =>, ';', ';', <:, >:, #, @, <%)
+  keywords.toList.filter(kw => tokenString(kw) == null)
+  //> res1: List[Int] = List()
+  canStartStatTokens contains CASE //> res2: Boolean = false
+
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/util/lrutest.sc b/compiler/src/dotty/tools/dotc/util/lrutest.sc
new file mode 100644
index 000000000..6e6328b24
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/lrutest.sc
@@ -0,0 +1,40 @@
+package dotty.tools.dotc.util
+
+// Eclipse/IDE scratch worksheet exercising SixteenNibbles and LRUCache;
+// the `//>` comments record previously evaluated results.
+object lrutest {
+  println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet
+  val bits = new SixteenNibbles(0L) //> bits : dotty.tools.dotc.util.SixteenNibbles = SixteenNibbles(0, 0, 0, 0, 0,
+  //| 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+  bits.updated(1, 3) //> res0: dotty.tools.dotc.util.SixteenNibbles = SixteenNibbles(0, 3, 0, 0, 0, 0
+  //| , 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+  LRUCache.initialRing //> res1: dotty.tools.dotc.util.SixteenNibbles = SixteenNibbles(1, 2, 3, 4, 5, 6
+  //| , 7, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+  val cache = new LRUCache[String, String] //> cache : dotty.tools.dotc.util.LRUCache[String,String] = LRUCache()
+  cache lookup "hi" //> res2: String = null
+  cache enter ("hi", "x")
+  cache.indices.take(10).toList //> res3: List[Int] = List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0)
+  cache.last //> res4: Int = 6
+  cache lookup "hi" //> res5: String = x
+  cache.indices.take(10).toList //> res6: List[Int] = List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0)
+
+  for (i <- 1 to 10) {
+    if (cache.lookup(i.toString) == null)
+      cache.enter(i.toString, i.toString)
+  }
+
+  cache.indices.take(10).toList //> res7: List[Int] = List(5, 6, 7, 0, 1, 2, 3, 4, 5, 6)
+  cache //> res8: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 -
+  //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3)
+  cache //> res9: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 -
+  //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3)
+  cache.lookup("7") //> res10: String = 7
+  cache.indices.take(10).toList //> res11: List[Int] = List(0, 5, 6, 7, 1, 2, 3, 4, 0, 5)
+  cache.keysIterator.toList //> res12: List[String] = List(7, 10, 9, 8, 6, 5, 4, 3)
+  cache.lookup("10") //> res13: String = 10
+  cache.lookup("5") //> res14: String = 5
+  cache //> res15: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(5 -> 5, 10 -
+  //| > 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4, 3 -> 3)
+  cache.lookup("11") //> res16: String = null
+  cache.enter("11", "!!")
+  cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5
+  //| -> 5, 10 -> 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4)
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala
new file mode 100644
index 000000000..e30eca492
--- /dev/null
+++ b/compiler/src/dotty/tools/io/ClassPath.scala
@@ -0,0 +1,421 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools
+package io
+
+import java.net.URL
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.StringOps.splitWhere
+import File.pathSeparator
+import java.net.MalformedURLException
+import Jar.isJarOrZip
+import ClassPath._
+import scala.Option.option2Iterable
+import scala.reflect.io.Path.string2path
+import language.postfixOps
+
+/** <p>
+ * This module provides star expansion of '-classpath' option arguments, behaves the same as
+ * java, see [http://java.sun.com/javase/6/docs/technotes/tools/windows/classpath.html]
+ * </p>
+ *
+ * @author Stepan Koltsov
+ */
+object ClassPath {
+
+  /** Expand single path entry */
+  private def expandS(pattern: String): List[String] = {
+    val wildSuffix = File.separator + "*"
+
+    /** Get all subdirectories, jars, zips out of a directory. */
+    def lsDir(dir: Directory, filt: String => Boolean = _ => true) = {
+      val files = synchronized(dir.list)
+      files filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
+    }
+
+    // NOTE(review): `basedir` appears unreferenced in this method — confirm
+    // whether it is dead code left over from an earlier version.
+    def basedir(s: String) =
+      if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
+      else "."
+
+    if (pattern == "*") lsDir(Directory("."))
+    else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
+    else if (pattern contains '*') {
+      // Translate the shell-style glob into an anchored regex and filter.
+      val regexp = ("^%s$" format pattern.replaceAll("""\*""", """.*""")).r
+      lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
+    }
+    else List(pattern)
+  }
+
+  /** Split classpath using platform-dependent path separator */
+  def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
+
+  /** Join classpath using platform-dependent path separator */
+  def join(paths: String*): String = paths filterNot (_ == "") mkString pathSeparator
+
+  /** Split the classpath, apply a transformation function, and reassemble it. */
+  def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
+
+  /** Split the classpath, filter according to predicate, and reassemble. */
+  def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
+
+  /** Split the classpath and map them into Paths */
+  def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
+
+  /** Make all classpath components absolute. */
+  def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*)
+
+  /** Join the paths as a classpath */
+  def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
+  def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*)
+
+  /** Split the classpath and map them into URLs */
+  def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
+
+  /** Expand path and possibly expanding stars */
+  def expandPath(path: String, expandStar: Boolean = true): List[String] =
+    if (expandStar) split(path) flatMap expandS
+    else split(path)
+
+  /** Expand dir out to contents, a la extdir */
+  def expandDir(extdir: String): List[String] = {
+    AbstractFile getDirectory extdir match {
+      case null => Nil
+      case dir => dir filter (_.isClassContainer) map (x => new java.io.File(dir.file, x.name) getPath) toList
+    }
+  }
+  /** Expand manifest jar classpath entries: these are either urls, or paths
+   *  relative to the location of the jar.
+   */
+  def expandManifestPath(jarPath: String): List[URL] = {
+    val file = File(jarPath)
+    if (!file.isFile) return Nil
+
+    val baseDir = file.parent
+    new Jar(file).classPathElements map (elem =>
+      specToURL(elem) getOrElse (baseDir / elem).toURL
+    )
+  }
+
+  /** A useful name filter: matches the `$class.class` files generated
+   *  for trait implementations. */
+  def isTraitImplementation(name: String) = name endsWith "$class.class"
+
+  /** Parse `spec` as a URL, or None if it is malformed. */
+  def specToURL(spec: String): Option[URL] =
+    try Some(new URL(spec))
+    catch { case _: MalformedURLException => None }
+
+  /** A class modeling aspects of a ClassPath which should be
+   *  propagated to any classpaths it creates.
+   */
+  abstract class ClassPathContext {
+    /** A filter which can be used to exclude entities from the classpath
+     *  based on their name.
+     */
+    def isValidName(name: String): Boolean = true
+
+    /** From the representation to its identifier.
+     */
+    def toBinaryName(rep: AbstractFile): String
+
+    /** Create a new classpath based on the abstract file.
+     */
+    def newClassPath(file: AbstractFile): ClassPath
+
+    /** Creators for sub classpaths which preserve this context.
+     */
+    def sourcesInPath(path: String): List[ClassPath] =
+      for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+        new SourcePath(dir, this)
+
+    def contentsOfDirsInPath(path: String): List[ClassPath] =
+      for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+        newClassPath(entry)
+
+    // Note: URL specs are separated by spaces, not the path separator.
+    def classesAtAllURLS(path: String): List[ClassPath] =
+      (path split " ").toList flatMap classesAtURL
+
+    def classesAtURL(spec: String) =
+      for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
+        newClassPath(location)
+
+    def classesInExpandedPath(path: String): IndexedSeq[ClassPath] =
+      classesInPathImpl(path, true).toIndexedSeq
+
+    def classesInPath(path: String) = classesInPathImpl(path, false)
+
+    // Internal
+    private def classesInPathImpl(path: String, expand: Boolean) =
+      for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
+        newClassPath(dir)
+  }
+
+  /** Context for JVM classpaths: entries are `.class` files. */
+  class JavaContext extends ClassPathContext {
+    // Strip the ".class" suffix (6 characters) to obtain the binary name.
+    def toBinaryName(rep: AbstractFile) = {
+      val name = rep.name
+      assert(endsClass(name), name)
+      name.substring(0, name.length - 6)
+    }
+    def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
+  }
+
+  object DefaultJavaContext extends JavaContext {
+    override def isValidName(name: String) = !isTraitImplementation(name)
+  }
+
+  // File-extension checks done by hand on the suffix substring.
+  private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
+  private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
+  private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
+
+  /** From the source file to its identifier (name without extension).
+   */
+  def toSourceName(f: AbstractFile): String = {
+    val name = f.name
+
+    if (endsScala(name)) name.substring(0, name.length - 6)
+    else if (endsJava(name)) name.substring(0, name.length - 5)
+    else throw new FatalError("Unexpected source file ending: " + name)
+  }
+}
+
+/**
+ * Represents a package which contains classes and other packages
+ */
+abstract class ClassPath {
+  type AnyClassRep = ClassPath#ClassRep
+
+  /**
+   * The short name of the package (without prefix)
+   */
+  def name: String
+
+  /**
+   * A String representing the origin of this classpath element, if known.
+   * For example, the path of the directory or jar.
+   */
+  def origin: Option[String] = None
+
+  /** A list of URLs representing this classpath.
+   */
+  def asURLs: List[URL]
+
+  /** The whole classpath in the form of one String.
+   */
+  def asClasspathString: String
+
+  /** Info which should be propagated to any sub-classpaths.
+   */
+  def context: ClassPathContext
+
+  /** Lists of entities.
+   */
+  def classes: IndexedSeq[AnyClassRep]
+  def packages: IndexedSeq[ClassPath]
+  def sourcepaths: IndexedSeq[AbstractFile]
+
+  /**
+   * Represents classes which can be loaded with a ClassfileLoader
+   * and / or a SourcefileLoader.
+   */
+  case class ClassRep(binary: Option[AbstractFile], source: Option[AbstractFile]) {
+    // A ClassRep must have a binary or a source file; the binary wins
+    // when both are present.
+    def name: String = binary match {
+      case Some(x) => context.toBinaryName(x)
+      case _ =>
+        assert(source.isDefined)
+        toSourceName(source.get)
+    }
+  }
+
+  /** Filters for assessing validity of various entities.
+   */
+  def validClassFile(name: String) = endsClass(name) && context.isValidName(name)
+  def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
+  def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+
+  /**
+   * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
+   * Does not support nested classes on .NET
+   */
+  def findClass(name: String): Option[AnyClassRep] =
+    splitWhere(name, _ == '.', true) match {
+      case Some((pkg, rest)) =>
+        // Descend into the first matching package and recurse on the rest.
+        val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
+        rep map {
+          case x: ClassRep => x
+          case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
+        }
+      case _ =>
+        classes find (_.name == name)
+    }
+
+  /** The source file backing `name`, if this classpath has one. */
+  def findSourceFile(name: String): Option[AbstractFile] =
+    findClass(name) match {
+      case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x)
+      case _ => None
+    }
+
+  // Canonical (sorted) string form; used so equality ignores entry order.
+  def sortString = join(split(asClasspathString).sorted: _*)
+  override def equals(that: Any) = that match {
+    case x: ClassPath => this.sortString == x.sortString
+    case _ => false
+  }
+  override def hashCode = sortString.hashCode()
+}
+
+/**
+ * A Classpath containing source files
+ */
+class SourcePath(dir: AbstractFile, val context: ClassPathContext) extends ClassPath {
+  def name = dir.name
+  override def origin = dir.underlyingSource map (_.path)
+  def asURLs = if (dir.file == null) Nil else List(dir.toURL)
+  def asClasspathString = dir.path
+  val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
+
+  // Collects (packages, classes) in a single pass over `dir`.
+  private def traverse() = {
+    val classBuf = immutable.Vector.newBuilder[ClassRep]
+    val packageBuf = immutable.Vector.newBuilder[SourcePath]
+    dir foreach { f =>
+      if (!f.isDirectory && validSourceFile(f.name))
+        classBuf += ClassRep(None, Some(f))
+      else if (f.isDirectory && validPackage(f.name))
+        packageBuf += new SourcePath(f, context)
+    }
+    (packageBuf.result, classBuf.result)
+  }
+
+  lazy val (packages, classes) = traverse()
+  override def toString() = "sourcepath: " + dir.toString()
+}
+
+/**
+ * A directory (or a .jar file) containing classfiles and packages
+ */
+class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext) extends ClassPath {
+ def name = dir.name
+ override def origin = dir.underlyingSource map (_.path)
+ // A virtual directory has no underlying java.io.File, hence no URL to expose.
+ def asURLs = if (dir.file == null) Nil else List(dir.toURL)
+ def asClasspathString = dir.path
+ // A binary classpath entry carries no source files.
+ val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
+
+ // calculates (packages, classes) in one traversal.
+ private def traverse() = {
+ val classBuf = immutable.Vector.newBuilder[ClassRep]
+ val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
+ dir foreach { f =>
+ if (!f.isDirectory && validClassFile(f.name))
+ classBuf += ClassRep(Some(f), None)
+ else if (f.isDirectory && validPackage(f.name))
+ packageBuf += new DirectoryClassPath(f, context)
+ }
+ (packageBuf.result, classBuf.result)
+ }
+
+ // Traversal is deferred until first use, then cached.
+ lazy val (packages, classes) = traverse()
+ override def toString() = "directory classpath: " + origin.getOrElse("?")
+}
+
+/** A copy of `original` in which each entry present as a key in `subst`
+ * is replaced by its substitute; all other entries are kept unchanged.
+ */
+class DeltaClassPath(original: MergedClassPath, subst: Map[ClassPath, ClassPath])
+extends MergedClassPath(original.entries map (e => subst getOrElse (e, e)), original.context) {
+ // not sure we should require that here. Commented out for now.
+ // require(subst.keySet subsetOf original.entries.toSet)
+ // We might add specialized operations for computing classes packages here. Not sure it's worth it.
+}
+
+/**
+ * A classpath unifying multiple class- and sourcepath entries.
+ */
+class MergedClassPath(
+ val entries: IndexedSeq[ClassPath],
+ val context: ClassPathContext)
+extends ClassPath {
+ def this(entries: TraversableOnce[ClassPath], context: ClassPathContext) =
+ this(entries.toIndexedSeq, context)
+
+ // NOTE(review): throws on an empty `entries` sequence (entries.head) — confirm
+ // callers never construct an empty merged classpath.
+ def name = entries.head.name
+ def asURLs = (entries flatMap (_.asURLs)).toList
+ lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
+
+ override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
+ override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
+
+ // All classes of all entries, deduplicated by name. When the same class name
+ // appears in several entries, the first occurrence wins and is enriched with
+ // the binary/source side a later duplicate provides, if missing.
+ lazy val classes: IndexedSeq[AnyClassRep] = {
+ var count = 0
+ val indices = mutable.AnyRefMap[String, Int]()
+ val cls = new mutable.ArrayBuffer[AnyClassRep](1024)
+
+ for (e <- entries; c <- e.classes) {
+ val name = c.name
+ if (indices contains name) {
+ val idx = indices(name)
+ val existing = cls(idx)
+
+ // NOTE(review): if both conditions fired for one duplicate, the second
+ // copy (built from the stale `existing`) would drop the binary just
+ // merged. In practice a ClassRep always has at least one side defined,
+ // making the branches mutually exclusive — verify against ClassRep's
+ // construction sites before relying on this.
+ if (existing.binary.isEmpty && c.binary.isDefined)
+ cls(idx) = existing.copy(binary = c.binary)
+ if (existing.source.isEmpty && c.source.isDefined)
+ cls(idx) = existing.copy(source = c.source)
+ }
+ else {
+ indices(name) = count
+ cls += c
+ count += 1
+ }
+ }
+ cls.toIndexedSeq
+ }
+
+ // All packages of all entries, deduplicated by name; duplicates are merged
+ // recursively via addPackage so nested contents are combined, not shadowed.
+ lazy val packages: IndexedSeq[ClassPath] = {
+ var count = 0
+ val indices = mutable.AnyRefMap[String, Int]()
+ val pkg = new mutable.ArrayBuffer[ClassPath](256)
+
+ for (e <- entries; p <- e.packages) {
+ val name = p.name
+ if (indices contains name) {
+ val idx = indices(name)
+ pkg(idx) = addPackage(pkg(idx), p)
+ }
+ else {
+ indices(name) = count
+ pkg += p
+ count += 1
+ }
+ }
+ pkg.toIndexedSeq
+ }
+
+ // Appends `pkg` to an existing merged entry, or wraps both in a new merge.
+ private def addPackage(to: ClassPath, pkg: ClassPath) = {
+ val newEntries: IndexedSeq[ClassPath] = to match {
+ case cp: MergedClassPath => cp.entries :+ pkg
+ case _ => IndexedSeq(to, pkg)
+ }
+ new MergedClassPath(newEntries, context)
+ }
+ // Debug aid: prints one line per classpath element.
+ // NOTE(review): splits on ':' — on Windows the path separator is ';', so this
+ // is a Unix-only rendering; harmless for a debug printout but worth confirming.
+ def show(): Unit = {
+ println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
+ asClasspathString split ':' foreach (x => println(" " + x))
+ }
+ override def toString() = "merged classpath " + entries.mkString("(", "\n", ")")
+}
+
+/**
+ * The classpath when compiling with target:jvm. Binary files (classfiles) are represented
+ * as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
+ */
+class JavaClassPath(
+ containers: IndexedSeq[ClassPath],
+ context: JavaContext)
+extends MergedClassPath(containers, context) { }
+
+object JavaClassPath {
+ /** Builds a classpath from URLs, silently skipping URLs that do not resolve
+ * to an AbstractFile (AbstractFile.getURL returns null for those).
+ */
+ def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
+ val containers = {
+ for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
+ new DirectoryClassPath(f, context)
+ }
+ new JavaClassPath(containers.toIndexedSeq, context)
+ }
+ /** Same, with the default Java context. */
+ def fromURLs(urls: Seq[URL]): JavaClassPath =
+ fromURLs(urls, ClassPath.DefaultJavaContext)
+}
diff --git a/compiler/src/dotty/tools/io/DaemonThreadFactory.scala b/compiler/src/dotty/tools/io/DaemonThreadFactory.scala
new file mode 100644
index 000000000..ae0cda260
--- /dev/null
+++ b/compiler/src/dotty/tools/io/DaemonThreadFactory.scala
@@ -0,0 +1,16 @@
+package dotty.tools
+package io
+
+import java.util.concurrent._
+
+/** A ThreadFactory whose threads are daemons, so they never keep the JVM alive. */
+class DaemonThreadFactory extends ThreadFactory {
+ def newThread(r: Runnable): Thread = {
+ val thread = new Thread(r)
+ thread setDaemon true
+ thread
+ }
+}
+
+object DaemonThreadFactory {
+ /** A cached thread pool whose worker threads are all daemons. */
+ def newPool() = Executors.newCachedThreadPool(new DaemonThreadFactory)
+}
diff --git a/compiler/src/dotty/tools/io/Fileish.scala b/compiler/src/dotty/tools/io/Fileish.scala
new file mode 100644
index 000000000..0fcb13307
--- /dev/null
+++ b/compiler/src/dotty/tools/io/Fileish.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package dotty.tools
+package io
+
+import java.io.{ InputStream }
+import java.util.jar.JarEntry
+import language.postfixOps
+
+/** A common interface for File-based things and Stream-based things.
+ * (In particular, io.File and JarEntry.)
+ */
+class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars {
+ // Each call re-invokes the supplied thunk, yielding a fresh stream.
+ def inputStream() = input()
+
+ def parent = path.parent
+ def name = path.name
+ def isSourceFile = path.hasExtension("java", "scala")
+
+ // Package-declaration lines of the content, with the "package " prefix stripped.
+ private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim }
+ // Package name inferred from the directory path (separators replaced by dots).
+ lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".")
+ // Package name inferred from the source's own package clauses.
+ lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "."
+
+ override def toString = path.path
+}
+
+object Fileish {
+ /** Wraps an on-disk file; the stream thunk reopens the file each time. */
+ def apply(f: File): Fileish = new Fileish(f, () => f.inputStream())
+ /** Wraps a jar entry with a caller-supplied stream thunk. */
+ def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in)
+ def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in)
+}
diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala
new file mode 100644
index 000000000..42efc7e06
--- /dev/null
+++ b/compiler/src/dotty/tools/io/Jar.scala
@@ -0,0 +1,172 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+
+package dotty.tools
+package io
+
+import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, FileInputStream, DataOutputStream }
+import java.util.jar._
+import scala.collection.JavaConverters._
+import Attributes.Name
+import scala.language.{postfixOps, implicitConversions}
+
+// Attributes.Name instances:
+//
+// static Attributes.Name CLASS_PATH
+// static Attributes.Name CONTENT_TYPE
+// static Attributes.Name EXTENSION_INSTALLATION
+// static Attributes.Name EXTENSION_LIST
+// static Attributes.Name EXTENSION_NAME
+// static Attributes.Name IMPLEMENTATION_TITLE
+// static Attributes.Name IMPLEMENTATION_URL
+// static Attributes.Name IMPLEMENTATION_VENDOR
+// static Attributes.Name IMPLEMENTATION_VENDOR_ID
+// static Attributes.Name IMPLEMENTATION_VERSION
+// static Attributes.Name MAIN_CLASS
+// static Attributes.Name MANIFEST_VERSION
+// static Attributes.Name SEALED
+// static Attributes.Name SIGNATURE_VERSION
+// static Attributes.Name SPECIFICATION_TITLE
+// static Attributes.Name SPECIFICATION_VENDOR
+// static Attributes.Name SPECIFICATION_VERSION
+
+class Jar(file: File) extends Iterable[JarEntry] {
+ def this(jfile: JFile) = this(File(jfile))
+ def this(path: String) = this(File(path))
+
+ // Overridable error sink; default just prints to the console.
+ protected def errorFn(msg: String): Unit = Console println msg
+
+ lazy val jarFile = new JarFile(file.jfile)
+ // None when the archive has no manifest.
+ lazy val manifest = withJarInput(s => Option(s.getManifest))
+
+ def mainClass = manifest map (f => f(Name.MAIN_CLASS))
+ /** The manifest-defined classpath String if available. */
+ def classPathString: Option[String] =
+ for (m <- manifest ; cp <- m.attrs get Name.CLASS_PATH) yield cp
+ def classPathElements: List[String] = classPathString match {
+ case Some(s) => s split "\\s+" toList
+ case _ => Nil
+ }
+
+ // Opens a fresh JarInputStream for `f` and always closes it afterwards.
+ def withJarInput[T](f: JarInputStream => T): T = {
+ val in = new JarInputStream(file.inputStream())
+ try f(in)
+ finally in.close()
+ }
+ def jarWriter(mainAttrs: (Attributes.Name, String)*) = {
+ new JarWriter(file, Jar.WManifest(mainAttrs: _*).underlying)
+ }
+
+ // Note: every traversal reopens the archive via withJarInput.
+ override def foreach[U](f: JarEntry => U): Unit = withJarInput { in =>
+ Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f
+ }
+ // Materializes all entries (via foreach) before iterating.
+ override def iterator: Iterator[JarEntry] = this.toList.iterator
+ def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x)))
+
+ // NOTE(review): returns null after reporting a missing entry — callers of
+ // fileishIterator get a Fileish whose stream thunk may yield null; confirm intended.
+ private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match {
+ case null => errorFn("No such entry: " + entry) ; null
+ case x => x
+ }
+ override def toString = "" + file
+}
+
+class JarWriter(val file: File, val manifest: Manifest) {
+ // The underlying stream is only opened on first write.
+ private lazy val out = new JarOutputStream(file.outputStream(), manifest)
+
+ /** Adds a jar entry for the given path and returns an output
+ * stream to which the data should immediately be written.
+ * This unusual interface exists to work with fjbg.
+ */
+ def newOutputStream(path: String): DataOutputStream = {
+ val entry = new JarEntry(path)
+ out putNextEntry entry
+ new DataOutputStream(out)
+ }
+
+ /** Recursively adds the contents of `dir`, then closes the archive. */
+ def writeAllFrom(dir: Directory): Unit = {
+ try dir.list foreach (x => addEntry(x, ""))
+ finally out.close()
+ }
+ def addStream(entry: JarEntry, in: InputStream): Unit = {
+ out putNextEntry entry
+ try transfer(in, out)
+ finally out.closeEntry()
+ }
+ def addFile(file: File, prefix: String): Unit = {
+ val entry = new JarEntry(prefix + file.name)
+ addStream(entry, file.inputStream())
+ }
+ // Dispatches on entry kind: files are added directly, directories recursively.
+ def addEntry(entry: Path, prefix: String): Unit = {
+ if (entry.isFile) addFile(entry.toFile, prefix)
+ else addDirectory(entry.toDirectory, prefix + entry.name + "/")
+ }
+ def addDirectory(entry: Directory, prefix: String): Unit = {
+ entry.list foreach (p => addEntry(p, prefix))
+ }
+
+ // Copies `in` to `out` in 10 KiB chunks.
+ // NOTE(review): `in` is closed only on normal EOF, not if write throws — confirm
+ // whether a leak on failure matters to callers.
+ private def transfer(in: InputStream, out: OutputStream) = {
+ val buf = new Array[Byte](10240)
+ def loop(): Unit = in.read(buf, 0, buf.length) match {
+ case -1 => in.close()
+ case n => out.write(buf, 0, n) ; loop
+ }
+ loop
+ }
+
+ def close() = out.close()
+}
+
+object Jar {
+ type AttributeMap = java.util.Map[Attributes.Name, String]
+
+ object WManifest {
+ /** A fresh manifest seeded with the given main attributes. */
+ def apply(mainAttrs: (Attributes.Name, String)*): WManifest = {
+ val m = WManifest(new JManifest)
+ for ((k, v) <- mainAttrs)
+ m(k) = v
+
+ m
+ }
+ def apply(manifest: JManifest): WManifest = new WManifest(manifest)
+ implicit def unenrichManifest(x: WManifest): JManifest = x.underlying
+ }
+ /** A thin wrapper giving map-like access to a java.util.jar.Manifest. */
+ class WManifest(manifest: JManifest) {
+ // Constructor side effect: stamp the default attributes into the manifest.
+ for ((k, v) <- initialMainAttrs)
+ this(k) = v
+
+ def underlying = manifest
+ // withDefaultValue null: apply() yields null (not an exception) for absent keys.
+ def attrs = manifest.getMainAttributes().asInstanceOf[AttributeMap].asScala withDefaultValue null
+ def initialMainAttrs: Map[Attributes.Name, String] = {
+ import scala.util.Properties._
+ Map(
+ Name.MANIFEST_VERSION -> "1.0",
+ ScalaCompilerVersion -> versionNumberString
+ )
+ }
+
+ def apply(name: Attributes.Name): String = attrs(name)
+ def apply(name: String): String = apply(new Attributes.Name(name))
+ def update(key: Attributes.Name, value: String) = attrs.put(key, value)
+ def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value)
+
+ def mainClass: String = apply(Name.MAIN_CLASS)
+ def mainClass_=(value: String) = update(Name.MAIN_CLASS, value)
+ }
+
+ // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
+ // for some ideas.
+ // The first four bytes of every zip/jar file: "PK\x03\x04".
+ private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
+ private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
+
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
+ /** By extension, or — when `examineFile` — by reading the magic number. */
+ def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
+ f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
+
+ /** Packs `sourceDir` into `file` with the given Main-Class attribute. */
+ def create(file: File, sourceDir: Directory, mainClass: String): Unit = {
+ val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass)
+ writer writeAllFrom sourceDir
+ }
+}
diff --git a/compiler/src/dotty/tools/io/package.scala b/compiler/src/dotty/tools/io/package.scala
new file mode 100644
index 000000000..1c0e0b5c4
--- /dev/null
+++ b/compiler/src/dotty/tools/io/package.scala
@@ -0,0 +1,58 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package dotty.tools
+
+import java.util.concurrent.{ Future, Callable }
+import java.util.{ Timer, TimerTask }
+import java.util.jar.{ Attributes }
+import scala.language.implicitConversions
+
+package object io {
+ // Forwarders from scala.reflect.io
+ type AbstractFile = scala.reflect.io.AbstractFile
+ val AbstractFile = scala.reflect.io.AbstractFile
+ type Directory = scala.reflect.io.Directory
+ val Directory = scala.reflect.io.Directory
+ type File = scala.reflect.io.File
+ val File = scala.reflect.io.File
+ type Path = scala.reflect.io.Path
+ val Path = scala.reflect.io.Path
+ type PlainFile = scala.reflect.io.PlainFile
+ //val PlainFile = scala.reflect.io.PlainFile
+ val Streamable = scala.reflect.io.Streamable
+ type VirtualDirectory = scala.reflect.io.VirtualDirectory
+ type VirtualFile = scala.reflect.io.VirtualFile
+ val ZipArchive = scala.reflect.io.ZipArchive
+ type ZipArchive = scala.reflect.io.ZipArchive
+ type JManifest = java.util.jar.Manifest
+ type JFile = java.io.File
+
+ implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m)
+ // Shared pool of daemon threads used by spawn/submit below.
+ private lazy val daemonThreadPool = DaemonThreadFactory.newPool()
+
+ def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
+ def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
+ // Runs `body` asynchronously on the daemon pool, returning its Future.
+ def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
+ def submit(runnable: Runnable) = daemonThreadPool submit runnable
+
+ // Create, start, and return a daemon thread
+ def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
+ // Applies `f` to configure the thread BEFORE starting it.
+ def newThread(f: Thread => Unit)(body: => Unit): Thread = {
+ val thread = new Thread(runnable(body))
+ f(thread)
+ thread.start
+ thread
+ }
+
+ // Set a timer to execute the given code.
+ // The Timer is a daemon, so a pending task won't keep the JVM alive.
+ def timer(seconds: Int)(body: => Unit): Timer = {
+ val alarm = new Timer(true) // daemon
+ val tt = new TimerTask { def run() = body }
+
+ alarm.schedule(tt, seconds * 1000)
+ alarm
+ }
+}
diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala
new file mode 100644
index 000000000..5dae82b71
--- /dev/null
+++ b/compiler/src/dotty/tools/package.scala
@@ -0,0 +1,24 @@
+package dotty
+import scala.annotation.Annotation
+
+package object tools {
+ type FatalError = scala.reflect.internal.FatalError
+ val FatalError = scala.reflect.internal.FatalError
+
+ // Marker annotations (no runtime behavior visible here).
+ class sharable extends Annotation
+ class unshared extends Annotation
+
+ // A one-element list whose element is the empty list: List(List()).
+ val ListOfNil = Nil :: Nil
+
+ /** True if two lists have the same length. Since calling length on linear sequences
+ * is O(n), it is an inadvisable way to test length equality.
+ */
+ // NOTE(review): tail-recursive by construction; annotating with @tailrec would
+ // let the compiler guarantee it.
+ final def sameLength[T](xs: List[T], ys: List[T]): Boolean = xs match {
+ case _ :: xs1 =>
+ ys match {
+ case _ :: ys1 => sameLength(xs1, ys1)
+ case _ => false
+ }
+ case _ => ys.isEmpty
+ }
+}