package dotty.tools
package dotc
import core._
import Contexts._
import Periods._
import Symbols._
import Scopes._
import typer.{FrontEnd, Typer, Mode, ImportInfo, RefChecks}
import reporting.ConsoleReporter
import dotty.tools.dotc.core.Phases.Phase
import dotty.tools.dotc.transform._
import dotty.tools.dotc.transform.TreeTransforms.{TreeTransform, TreeTransformer}
import dotty.tools.dotc.core.DenotTransformers.DenotTransformer
import dotty.tools.dotc.core.Denotations.SingleDenotation
/** The compiler driver: owns the ordered list of compiler phases and knows how
 *  to bootstrap a fresh root [[Context]] for a compilation [[Run]].
 */
class Compiler {
/** Meta-ordering constraint:
*
* DenotTransformers that change the signature of their denotation's info must go
* after erasure. The reason is that denotations are permanently referred to by
* TermRefs which contain a signature. If the signature of a symbol would change,
* all refs to it would become outdated - they could not be dereferenced in the
* new phase.
*
* As an example, addGetters would change a field
*
* val x: T
*
* to a method
*
* def x: T
*
* but this would affect the signature of `x` (goes from NotAMethod to a method
* signature). So we can't do this before erasure.
*
* After erasure, signature changing denot-transformers are OK because erasure
* will make sure that only term refs with fixed SymDenotations survive beyond it. This
* is possible because:
*
* - splitter has run, so every ident or select refers to a unique symbol
* - after erasure, asSeenFrom is the identity, so every reference has a
* plain SymDenotation, as opposed to a UniqueRefDenotation.
*/
// The full phase plan, as a list of groups. NOTE(review): each inner List is
// presumably a group of mini-phases fused into a single tree traversal (the
// TreeTransforms import suggests this) — confirm against the phase framework.
// Group order is significant; see the meta-ordering constraint above
// (signature-changing transformers must come after Erasure).
def phases: List[List[Phase]] =
List(
List(new FrontEnd),
List(new FirstTransform,
new SyntheticMethods),
List(new SuperAccessors),
// pickling goes here
List(new RefChecks,
new ElimRepeated,
new ElimLocals),
List(new ExtensionMethods),
List(new TailRec),
List(new PatternMatcher,
new ExplicitOuter,
// new LazyValTranformContext().transformer, // disabled, awaiting fixes
new Splitter),
List(new ElimByName,
new TypeTestsCasts,
new InterceptedMethods,
new Literalize),
List(new Erasure),
List(new CapturedVars)
)
// Mutable counter shared by all runs created through this Compiler instance.
// Starts at 1, so the first call to nextRunId yields 2 (increment happens
// before the value is returned).
var runId = 1
def nextRunId = {
runId += 1; runId
}
/** Produces the following contexts, from outermost to innermost
*
* bootStrap: A context with next available runId and a scope consisting of
* the RootPackage _root_
* start A context with RootClass as owner and the necessary initializations
* for type checking.
* imports For each element of RootImports, an import context
*/
def rootContext(implicit ctx: Context): Context = {
// Initialization order below is significant: definitions are initialized
// against the incoming context first, then re-bound to `start` once the
// typer-ready context exists (see the re-init call further down).
ctx.definitions.init(ctx)
ctx.usePhases(phases)
val rootScope = new MutableScope
val bootstrap = ctx.fresh
.setPeriod(Period(nextRunId, FirstPhaseId))
.setScope(rootScope)
// Make _root_ visible in the bootstrap scope before building `start` on top.
rootScope.enter(ctx.definitions.RootPackage)(bootstrap)
val start = bootstrap.fresh
.setOwner(defn.RootClass)
.setTyper(new Typer)
.setMode(Mode.ImplicitsEnabled)
.setTyperState(new MutableTyperState(ctx.typerState, new ConsoleReporter()(ctx), isCommittable = true))
ctx.definitions.init(start) // set context of definitions to start
// Wrap `start` in one nested import context per root import (e.g. the
// default java.lang/scala imports), folding from outermost to innermost.
def addImport(ctx: Context, sym: Symbol) =
ctx.fresh.setImportInfo(ImportInfo.rootImport(sym)(ctx))
(start.setRunInfo(new RunInfo(start)) /: defn.RootImports)(addImport)
}
/** Create a new compilation [[Run]] rooted in a freshly bootstrapped context.
 *
 *  NOTE(review): the finally block resets the context base and clears run
 *  info AFTER the Run is constructed — presumably discarding state left over
 *  from a previous run so the new Run starts clean; confirm against the
 *  ContextBase/RunInfo lifecycle before relying on this ordering.
 */
def newRun(implicit ctx: Context): Run = {
try new Run(this)(rootContext)
finally {
ctx.base.reset()
ctx.runInfo.clear()
}
}
}
|