Diffstat (limited to 'compiler/src/dotty/tools/dotc/core/tasty')
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala       53
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala          101
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala      79
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala    39
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala         188
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala         553
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala            30
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala         71
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala        122
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala         141
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala       95
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala          188
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala          641
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala       1161
14 files changed, 3462 insertions, 0 deletions
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
new file mode 100644
index 000000000..2c93819d5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -0,0 +1,53 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, SymDenotations._, Symbols._
+import dotty.tools.dotc.ast.tpd
+import TastyUnpickler._, TastyBuffer._
+import util.Positions._
+import util.{SourceFile, NoSource}
+import Annotations.Annotation
+import core.Mode
+import classfile.ClassfileParser
+
+object DottyUnpickler {
+
+ /** Exception thrown if classfile is corrupted */
+ class BadSignature(msg: String) extends RuntimeException(msg)
+
+ class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler])
+ extends SectionUnpickler[TreeUnpickler]("ASTs") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new TreeUnpickler(reader, tastyName, posUnpickler)
+ }
+
+ class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new PositionUnpickler(reader)
+ }
+}
+
+/** A class for unpickling Tasty trees and symbols.
+ * @param bytes the byte array containing the TASTY file from which we unpickle
+ */
+class DottyUnpickler(bytes: Array[Byte]) extends ClassfileParser.Embedded {
+ import tpd._
+ import DottyUnpickler._
+
+ val unpickler = new TastyUnpickler(bytes)
+ private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler)
+ private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler(posUnpicklerOpt)).get
+
+ /** Enter all toplevel classes and objects into their scopes
+ * @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
+ treeUnpickler.enterTopLevel(roots)
+
+ /** The unpickled trees. */
+ def body(implicit ctx: Context): List[Tree] = {
+ treeUnpickler.unpickle()
+ }
+}
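
Taken together, the two section unpicklers above are what DottyUnpickler drives: it reads the optional "Positions" section first and hands it to the "ASTs" tree unpickler. A minimal usage sketch, assuming an implicit Context, the raw TASTY bytes (`tastyBytes`) and the root SymDenotations (`roots`) are already in scope (all three names are assumptions, not part of the patch):

    import dotty.tools.dotc.ast.tpd

    // Sketch only: enter the toplevel symbols, then force the trees.
    val unpickler = new DottyUnpickler(tastyBytes)
    unpickler.enter(roots)                       // overwrite the given root denotations
    val trees: List[tpd.Tree] = unpickler.body   // unpickles the "ASTs" section on demand
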
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
new file mode 100644
index 000000000..3ff7298ce
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -0,0 +1,101 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import collection.mutable
+import Names.{Name, chrs}
+import Decorators._, NameOps._
+import TastyBuffer._
+import scala.io.Codec
+import TastyName._
+import TastyFormat._
+
+class NameBuffer extends TastyBuffer(10000) {
+ import NameBuffer._
+
+ private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef]
+
+ def nameIndex(name: TastyName): NameRef = nameRefs.get(name) match {
+ case Some(ref) =>
+ ref
+ case None =>
+ val ref = NameRef(nameRefs.size)
+ nameRefs(name) = ref
+ ref
+ }
+ def nameIndex(name: Name): NameRef = {
+ val tname =
+ if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed))
+ else Simple(name.toTermName)
+ nameIndex(tname)
+ }
+
+ def nameIndex(str: String): NameRef = nameIndex(str.toTermName)
+
+ def fullNameIndex(name: Name): NameRef = {
+ val pos = name.lastIndexOf('.')
+ if (pos > 0)
+ nameIndex(Qualified(fullNameIndex(name.take(pos)), nameIndex(name.drop(pos + 1))))
+ else
+ nameIndex(name)
+ }
+
+ private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = {
+ val lengthAddr = currentAddr
+ for (i <- 0 until lengthWidth) writeByte(0)
+ op
+ val length = currentAddr.index - lengthAddr.index - lengthWidth
+ putNat(lengthAddr, length, lengthWidth)
+ }
+
+ def writeNameRef(ref: NameRef) = writeNat(ref.index)
+
+ def pickleName(name: TastyName): Unit = name match {
+ case Simple(name) =>
+ val bytes =
+ if (name.length == 0) new Array[Byte](0)
+ else Codec.toUTF8(chrs, name.start, name.length)
+ writeByte(UTF8)
+ writeNat(bytes.length)
+ writeBytes(bytes, bytes.length)
+ case Qualified(qualified, selector) =>
+ writeByte(QUALIFIED)
+ withLength { writeNameRef(qualified); writeNameRef(selector) }
+ case Signed(original, params, result) =>
+ writeByte(SIGNED)
+ withLength(
+ { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) },
+ if ((params.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2)
+ case Expanded(prefix, original) =>
+ writeByte(EXPANDED)
+ withLength { writeNameRef(prefix); writeNameRef(original) }
+ case ModuleClass(module) =>
+ writeByte(OBJECTCLASS)
+ withLength { writeNameRef(module) }
+ case SuperAccessor(accessed) =>
+ writeByte(SUPERACCESSOR)
+ withLength { writeNameRef(accessed) }
+ case DefaultGetter(method, paramNumber) =>
+ writeByte(DEFAULTGETTER)
+ withLength { writeNameRef(method); writeNat(paramNumber) }
+ case Shadowed(original) =>
+ writeByte(SHADOWED)
+ withLength { writeNameRef(original) }
+ }
+
+ override def assemble(): Unit = {
+ var i = 0
+ for ((name, ref) <- nameRefs) {
+ assert(ref.index == i)
+ i += 1
+ pickleName(name)
+ }
+ }
+}
+
+object NameBuffer {
+ private val maxIndexWidth = 3 // allows name indices up to 2^21.
+ private val payloadBitsPerByte = 7 // determined by nat encoding in TastyBuffer
+ private val maxNumInByte = (1 << payloadBitsPerByte) - 1
+}
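
The point of the LinkedHashMap above is interning: every distinct TastyName is pickled exactly once, and later occurrences are stored as a small NameRef ordinal. A minimal sketch of that behaviour (the name strings are illustrative only, not part of the patch):

    import dotty.tools.dotc.core.tasty.TastyName.NameRef

    val names = new NameBuffer
    val a: NameRef = names.nameIndex("foo")  // first occurrence -> NameRef(0)
    val b: NameRef = names.nameIndex("bar")  // new name         -> NameRef(1)
    val c: NameRef = names.nameIndex("foo")  // interned         -> NameRef(0) again
    assert(a == c && a != b)
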
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
new file mode 100644
index 000000000..546894a9e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -0,0 +1,79 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast._
+import ast.Trees._
+import ast.Trees.WithLazyField
+import TastyFormat._
+import core._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._
+import collection.mutable
+import TastyBuffer._
+import util.Positions._
+
+class PositionPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Option[Addr]) {
+ val buf = new TastyBuffer(5000)
+ pickler.newSection("Positions", buf)
+ import buf._
+ import ast.tpd._
+
+ private val remainingAddrs = new java.util.IdentityHashMap[Tree, Iterator[Addr]]
+
+ def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean) = {
+ def toInt(b: Boolean) = if (b) 1 else 0
+ (addrDelta << 3) | (toInt(hasStartDelta) << 2) | (toInt(hasEndDelta) << 1) | toInt(hasPoint)
+ }
+
+ def picklePositions(roots: List[Tree])(implicit ctx: Context) = {
+ var lastIndex = 0
+ var lastPos = Position(0, 0)
+ def pickleDeltas(index: Int, pos: Position) = {
+ val addrDelta = index - lastIndex
+ val startDelta = pos.start - lastPos.start
+ val endDelta = pos.end - lastPos.end
+ buf.writeInt(header(addrDelta, startDelta != 0, endDelta != 0, !pos.isSynthetic))
+ if (startDelta != 0) buf.writeInt(startDelta)
+ if (endDelta != 0) buf.writeInt(endDelta)
+ if (!pos.isSynthetic) buf.writeInt(pos.pointDelta)
+ lastIndex = index
+ lastPos = pos
+ }
+
+ /** True if x's position cannot be reconstructed automatically from its initialPos
+ */
+ def alwaysNeedsPos(x: Positioned) = x match {
+ case _: WithLazyField[_] // initialPos is inaccurate for trees with lazy field
+ | _: Trees.PackageDef[_] => true // package defs might be split into several Tasty files
+ case _ => false
+ }
+
+ def traverse(x: Any): Unit = x match {
+ case x: Tree @unchecked =>
+ val pos = if (x.isInstanceOf[MemberDef]) x.pos else x.pos.toSynthetic
+ if (pos.exists && (pos != x.initialPos.toSynthetic || alwaysNeedsPos(x))) {
+ addrOfTree(x) match {
+ case Some(addr) =>
+ //println(i"pickling $x with $pos at $addr")
+ pickleDeltas(addr.index, pos)
+ case _ =>
+ //println(i"no address for $x")
+ }
+ }
+ //else if (x.pos.exists) println(i"skipping $x")
+ x match {
+ case x: MemberDef @unchecked =>
+ for (ann <- x.symbol.annotations) traverse(ann.tree)
+ case _ =>
+ }
+ traverse(x.productIterator)
+ case xs: TraversableOnce[_] =>
+ xs.foreach(traverse)
+ case x: Annotation =>
+ traverse(x.tree)
+ case _ =>
+ }
+ traverse(roots)
+ }
+}
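
The header written by `header(...)` packs four pieces of information into a single Nat: the address delta in the upper bits and three flag bits below it. A small self-contained sketch of the packing and of how the unpickler (next file) takes it apart; the helper name `packHeader` is made up for illustration:

    // Mirrors header(...) above and the decoding in PositionUnpickler.
    def packHeader(addrDelta: Int, hasStart: Boolean, hasEnd: Boolean, hasPoint: Boolean): Int = {
      def bit(b: Boolean) = if (b) 1 else 0
      (addrDelta << 3) | (bit(hasStart) << 2) | (bit(hasEnd) << 1) | bit(hasPoint)
    }

    val h = packHeader(addrDelta = 5, hasStart = true, hasEnd = false, hasPoint = true)
    assert((h >> 3) == 5)   // address delta
    assert((h & 4) != 0)    // a start delta follows
    assert((h & 2) == 0)    // no end delta follows
    assert((h & 1) != 0)    // a point delta follows
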
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
new file mode 100644
index 000000000..cbe213d89
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
@@ -0,0 +1,39 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+
+import util.Positions._
+import collection.mutable
+import TastyBuffer.{Addr, NoAddr}
+
+/** Unpickler for tree positions */
+class PositionUnpickler(reader: TastyReader) {
+ import reader._
+
+ private[tasty] lazy val positions = {
+ val positions = new mutable.HashMap[Addr, Position]
+ var curIndex = 0
+ var curStart = 0
+ var curEnd = 0
+ while (!isAtEnd) {
+ val header = readInt()
+ val addrDelta = header >> 3
+ val hasStart = (header & 4) != 0
+ val hasEnd = (header & 2) != 0
+ val hasPoint = (header & 1) != 0
+ curIndex += addrDelta
+ assert(curIndex >= 0)
+ if (hasStart) curStart += readInt()
+ if (hasEnd) curEnd += readInt()
+ positions(Addr(curIndex)) =
+ if (hasPoint) Position(curStart, curEnd, curStart + readInt())
+ else Position(curStart, curEnd)
+ }
+ positions
+ }
+
+ def posAt(addr: Addr) = positions.getOrElse(addr, NoPosition)
+}
+
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
new file mode 100644
index 000000000..13bc95028
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.dble
+
+object TastyBuffer {
+
+ /** The number of digits of the natural number `nat`, written in base 128 format. */
+ def natSize(nat: Int): Int =
+ if (nat < 128) 1 else natSize(nat >>> 7) + 1
+
+ /** An address pointing to an index in a Tasty buffer's byte array */
+ case class Addr(index: Int) extends AnyVal {
+ def - (delta: Int): Addr = Addr(this.index - delta)
+ def + (delta: Int): Addr = Addr(this.index + delta)
+
+ def relativeTo(base: Addr): Addr = this - base.index - AddrWidth
+ }
+
+ val NoAddr = Addr(-1)
+
+ /** The maximal number of address bytes.
+ * Since addresses are written as base-128 natural numbers,
+ * the value of 4 gives a maximal array size of 256M.
+ */
+ final val AddrWidth = 4
+}
+import TastyBuffer._
+
+/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format,
+ * and that supports reading and patching addresses represented as natural numbers.
+ */
+class TastyBuffer(initialSize: Int) {
+
+ /** The current byte array, will be expanded as needed */
+ var bytes = new Array[Byte](initialSize)
+
+ /** The number of bytes written */
+ var length = 0
+
+ // -- Output routines --------------------------------------------
+
+ /** Write a byte of data. */
+ def writeByte(b: Int): Unit = {
+ if (length >= bytes.length)
+ bytes = dble(bytes)
+ bytes(length) = b.toByte
+ length += 1
+ }
+
+ /** Write the first `n` bytes of `data`. */
+ def writeBytes(data: Array[Byte], n: Int): Unit = {
+ while (bytes.length < length + n) bytes = dble(bytes)
+ Array.copy(data, 0, bytes, length, n)
+ length += n
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * The last digit has bit 0x80 set.
+ */
+ def writeNat(x: Int): Unit =
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
+
+ /** Write a natural number in 2's complement big endian format, base 128.
+ * The last digit has bit 0x80 set.
+ */
+ def writeInt(x: Int): Unit =
+ writeLongInt(x)
+
+ /**
+ * Like writeNat, but for longs. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range Int.MIN_VALUE to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long): Unit = {
+ def writePrefix(x: Long): Unit = {
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+
+ /** Like writeInt, but for longs */
+ def writeLongInt(x: Long): Unit = {
+ def writePrefix(x: Long): Unit = {
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+
+ /** Write an uncompressed Long stored in 8 bytes in big endian format */
+ def writeUncompressedLong(x: Long): Unit = {
+ var y = x
+ val bytes = new Array[Byte](8)
+ for (i <- 7 to 0 by -1) {
+ bytes(i) = (y & 0xff).toByte
+ y = y >>> 8
+ }
+ writeBytes(bytes, 8)
+ }
+
+ // -- Address handling --------------------------------------------
+
+ /** Write natural number `x` right-adjusted in a field of `width` bytes
+ * starting with address `at`.
+ */
+ def putNat(at: Addr, x: Int, width: Int): Unit = {
+ var y = x
+ var w = width
+ if(at.index + w >= bytes.length)
+ bytes = dble(bytes)
+ var digit = y & 0x7f | 0x80
+ while (w > 0) {
+ w -= 1
+ bytes(at.index + w) = digit.toByte
+ y >>>= 7
+ digit = y & 0x7f
+ }
+ assert(y == 0, s"number $x too large to fit in $width bytes")
+ }
+
+ /** The byte at given address */
+ def getByte(at: Addr): Int = bytes(at.index)
+
+ /** The natural number at address `at` */
+ def getNat(at: Addr): Int = getLongNat(at).toInt
+
+ /** The long natural number at address `at` */
+ def getLongNat(at: Addr): Long = {
+ var b = 0L
+ var x = 0L
+ var idx = at.index
+ do {
+ b = bytes(idx)
+ x = (x << 7) | (b & 0x7f)
+ idx += 1
+ } while ((b & 0x80) == 0)
+ x
+ }
+
+ /** The address (represented as a natural number) at address `at` */
+ def getAddr(at: Addr) = Addr(getNat(at))
+
+ /** The smallest address equal to or following `at` which points to a non-zero byte */
+ final def skipZeroes(at: Addr): Addr =
+ if (getByte(at) != 0) at else skipZeroes(at + 1)
+
+ /** The address after the natural number found at address `at`. */
+ final def skipNat(at: Addr): Addr = {
+ val next = at + 1
+ if ((getByte(at) & 0x80) != 0) next else skipNat(next)
+ }
+
+ /** The address referring to the end of data written so far */
+ def currentAddr: Addr = Addr(length)
+
+ /** Reserve `AddrWidth` bytes to write an address into */
+ def reserveAddr(): Addr = {
+ val result = currentAddr
+ length += AddrWidth
+ result
+ }
+
+ /** Fill reserved space at address `at` with address `target` */
+ def fillAddr(at: Addr, target: Addr) =
+ putNat(at, target.index, AddrWidth)
+
+ /** Write address without leading zeroes */
+ def writeAddr(addr: Addr): Unit = writeNat(addr.index)
+
+ // -- Finalization --------------------------------------------
+
+ /** Hook to be overridden in subclasses.
+ * Perform all actions necessary to assemble the final byte array.
+ * After `assemble` no more output actions to this buffer are permitted.
+ */
+ def assemble(): Unit = ()
+}
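
As a concrete illustration of the base-128 encoding implemented by `writeNat`/`writeLongNat`: 300 = 2 * 128 + 44, so it is written as the digit 2 followed by the stop digit 44 with bit 0x80 set, and `TastyReader` (later in this patch) reads it back. A round-trip sketch, not part of the patch:

    val buf = new TastyBuffer(16)
    buf.writeNat(300)
    assert(buf.length == 2)
    assert((buf.bytes(0) & 0xff) == 2)            // leading digit, no stop bit
    assert((buf.bytes(1) & 0xff) == (44 | 0x80))  // stop digit carries bit 0x80

    val reader = new TastyReader(buf.bytes, 0, buf.length)
    assert(reader.readNat() == 300)
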
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
new file mode 100644
index 000000000..cb1b56c3c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -0,0 +1,553 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+/************************************************************
+Notation:
+
+We use BNF notation. Terminal symbols start with at least two
+consecutive upper case letters. Each terminal is represented as a
+single byte tag. Non-terminals are mixed case. Prefixes of the form
+lower case letter*_ are for explanation of semantic content only, they
+can be dropped without changing the grammar.
+
+Micro-syntax:
+
+ LongInt = Digit* StopDigit // big endian 2's complement, value fits in a Long w/o overflow
+ Int = LongInt // big endian 2's complement, fits in an Int w/o overflow
+ Nat = LongInt // non-negative value, fits in an Int without overflow
+ Digit = 0 | ... | 127
+ StopDigit = 128 | ... | 255 // value = digit - 128
+
+Macro-format:
+
+ File = Header majorVersion_Nat minorVersion_Nat UUID
+ nameTable_Length Name* Section*
+ Header = 0x5CA1AB1F
+ UUID = Byte*16 // random UUID
+
+ Section = NameRef Length Bytes
+ Length = Nat // length of rest of entry in bytes
+
+ Name = UTF8 Length UTF8-CodePoint*
+ QUALIFIED Length qualified_NameRef selector_NameRef
+ SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef*
+ EXPANDED Length prefix_NameRef original_NameRef
+ OBJECTCLASS Length module_NameRef
+ SUPERACCESSOR Length accessed_NameRef
+ DEFAULTGETTER Length method_NameRef paramNumber_Nat
+ SHADOWED Length original_NameRef
+ MANGLED Length mangle_NameRef name_NameRef
+ ...
+
+ NameRef = Nat // ordinal number of name in name table, starting from 0.
+
+Note: Unqualified names in the name table are strings. The context decides whether a name is
+a type-name or a term-name. The same string can represent both.
+
+Standard-Section: "ASTs" TopLevelStat*
+
+ TopLevelStat = PACKAGE Length Path TopLevelStat*
+ Stat
+
+ Stat = Term
+ VALDEF Length NameRef Type rhs_Term? Modifier*
+ DEFDEF Length NameRef TypeParam* Params* return_Type rhs_Term?
+ Modifier*
+ TYPEDEF Length NameRef (Type | Template) Modifier*
+ IMPORT Length qual_Term Selector*
+ Selector = IMPORTED name_NameRef
+ RENAMED to_NameRef
+
+ // Imports are for scala.meta; they are not used in the backend
+
+ TypeParam = TYPEPARAM Length NameRef Type Modifier*
+ Params = PARAMS Length Param*
+ Param = PARAM Length NameRef Type rhs_Term? Modifier* // rhs_Term is present in the case of an aliased class parameter
+ Template = TEMPLATE Length TypeParam* Param* Parent* Self? Stat* // Stat* always starts with the primary constructor.
+ Parent = Application
+ Type
+ Self = SELFDEF selfName_NameRef selfType_Type
+
+ Term = Path
+ Application
+ IDENT NameRef Type // used when term ident’s type is not a TermRef
+ SELECT possiblySigned_NameRef qual_Term
+ QUALTHIS typeIdent_Tree
+ NEW cls_Type
+ SUPER Length this_Term mixinTypeIdent_Tree?
+ TYPED Length expr_Term ascription_Type
+ NAMEDARG Length paramName_NameRef arg_Term
+ ASSIGN Length lhs_Term rhs_Term
+ BLOCK Length expr_Term Stat*
+ INLINED Length call_Term expr_Term Stat*
+ LAMBDA Length meth_Term target_Type
+ IF Length cond_Term then_Term else_Term
+ MATCH Length sel_Term CaseDef*
+ TRY Length expr_Term CaseDef* finalizer_Term?
+ RETURN Length meth_ASTRef expr_Term?
+ REPEATED Length elem_Type elem_Term*
+ BIND Length boundName_NameRef patType_Type pat_Term
+ ALTERNATIVE Length alt_Term*
+ UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term*
+ IDENTtpt NameRef Type // used for all type idents
+ SELECTtpt NameRef qual_Term
+ SINGLETONtpt Path
+ REFINEDtpt Length underlying_Term refinement_Stat*
+ APPLIEDtpt Length tycon_Term arg_Term*
+ POLYtpt Length TypeParam* body_Term
+ TYPEBOUNDStpt Length low_Term high_Term
+ ANNOTATEDtpt Length underlying_Term fullAnnotation_Term
+ ANDtpt Length left_Term right_Term
+ ORtpt Length left_Term right_Term
+ BYNAMEtpt underlying_Term
+ EMPTYTREE
+ SHARED term_ASTRef
+ Application = APPLY Length fn_Term arg_Term*
+
+ TYPEAPPLY Length fn_Term arg_Type*
+ CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree?
+ ImplicitArg = IMPLICITARG arg_Term
+ ASTRef = Nat // byte position in AST payload
+
+ Path = Constant
+ TERMREFdirect sym_ASTRef
+ TERMREFsymbol sym_ASTRef qual_Type
+ TERMREFpkg fullyQualified_NameRef
+ TERMREF possiblySigned_NameRef qual_Type
+ THIS clsRef_Type
+ RECthis recType_ASTRef
+ SHARED path_ASTRef
+
+ Constant = UNITconst
+ FALSEconst
+ TRUEconst
+ BYTEconst Int
+ SHORTconst Int
+ CHARconst Nat
+ INTconst Int
+ LONGconst LongInt
+ FLOATconst Int
+ DOUBLEconst LongInt
+ STRINGconst NameRef
+ NULLconst
+ CLASSconst Type
+ ENUMconst Path
+
+ Type = Path
+ TYPEREFdirect sym_ASTRef
+ TYPEREFsymbol sym_ASTRef qual_Type
+ TYPEREFpkg fullyQualified_NameRef
+ TYPEREF possiblySigned_NameRef qual_Type
+ RECtype parent_Type
+ SUPERtype Length this_Type underlying_Type
+ REFINEDtype Length underlying_Type refinement_NameRef info_Type
+ APPLIEDtype Length tycon_Type arg_Type*
+ TYPEBOUNDS Length low_Type high_Type
+ TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)?
+ ANNOTATEDtype Length underlying_Type fullAnnotation_Term
+ ANDtype Length left_Type right_Type
+ ORtype Length left_Type right_Type
+ BIND Length boundName_NameRef bounds_Type
+ // for type-variables defined in a type pattern
+ BYNAMEtype underlying_Type
+ POLYtype Length result_Type NamesTypes // variance encoded in front of name: +/-/=
+ METHODtype Length result_Type NamesTypes // needed for refinements
+ PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements
+ SHARED type_ASTRef
+ NamesTypes = NameType*
+ NameType = paramName_NameRef typeOrBounds_ASTRef
+
+ Modifier = PRIVATE
+ INTERNAL // package private
+ PROTECTED
+ PRIVATEqualified qualifier_Type // will be dropped
+ PROTECTEDqualified qualifier_Type // will be dropped
+ ABSTRACT
+ FINAL
+ SEALED
+ CASE
+ IMPLICIT
+ LAZY
+ OVERRIDE
+ INLINE // macro
+ STATIC // mapped to static Java member
+ OBJECT // an object or its class
+ TRAIT // a trait
+ LOCAL // private[this] or protected[this]
+ SYNTHETIC // generated by Scala compiler
+ ARTIFACT // to be tagged Java Synthetic
+ MUTABLE // a var
+ LABEL // method generated as a label
+ FIELDaccessor // getter or setter
+ CASEaccessor // getter for case class param
+ COVARIANT // type param marked “+”
+ CONTRAVARIANT // type param marked “-”
+ SCALA2X // Imported from Scala2.x
+ DEFAULTparameterized // Method with default params
+ INSUPERCALL // defined in the argument of a constructor supercall
+ STABLE // Method that is assumed to be stable
+ Annotation
+ Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term
+
+Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow the size of the tagged tree to be computed in a generic way.
+
+ Category 1 (tags 0-63) : tag
+ Category 2 (tags 64-95) : tag Nat
+ Category 3 (tags 96-111) : tag AST
+ Category 4 (tags 112-127): tag Nat AST
+ Category 5 (tags 128-255): tag Length <payload>
+
+Standard Section: "Positions" Assoc*
+
+ Assoc = Header startOffset_Delta? endOffset_Delta? point_Delta?
+ Header = addr_Delta + // in one Nat: difference of address to last recorded node << 3 +
+ hasStartDiff + // one bit indicating whether there follows a start offset delta << 2 +
+ hasEndDiff + // one bit indicating whether there follows an end offset delta << 1 +
+ hasPoint // one bit indicating whether there follows a point delta (synthetic positions have no point)
+ // Nodes which have the same positions as their parents are omitted.
+ // offset_Deltas give the difference of the start/end offset w.r.t. the
+ // same offset in the previously recorded node (or 0 for the first recorded node)
+ Delta = Int // Difference between consecutive offsets
+
+**************************************************************************************/
+
+object TastyFormat {
+
+ final val header = Array(0x5C, 0xA1, 0xAB, 0x1F)
+ final val MajorVersion = 0
+ final val MinorVersion = 5
+
+ // Name tags
+
+ final val UTF8 = 1
+ final val QUALIFIED = 2
+ final val SIGNED = 3
+ final val EXPANDED = 4
+ final val OBJECTCLASS = 5
+ final val SUPERACCESSOR = 6
+ final val DEFAULTGETTER = 7
+ final val SHADOWED = 8
+
+ // AST tags
+
+ final val UNITconst = 2
+ final val FALSEconst = 3
+ final val TRUEconst = 4
+ final val NULLconst = 5
+ final val PRIVATE = 6
+ final val INTERNAL = 7
+ final val PROTECTED = 8
+ final val ABSTRACT = 9
+ final val FINAL = 10
+ final val SEALED = 11
+ final val CASE = 12
+ final val IMPLICIT = 13
+ final val LAZY = 14
+ final val OVERRIDE = 15
+ final val INLINE = 16
+ final val STATIC = 17
+ final val OBJECT = 18
+ final val TRAIT = 19
+ final val LOCAL = 20
+ final val SYNTHETIC = 21
+ final val ARTIFACT = 22
+ final val MUTABLE = 23
+ final val LABEL = 24
+ final val FIELDaccessor = 25
+ final val CASEaccessor = 26
+ final val COVARIANT = 27
+ final val CONTRAVARIANT = 28
+ final val SCALA2X = 29
+ final val DEFAULTparameterized = 30
+ final val INSUPERCALL = 31
+ final val STABLE = 32
+
+ final val SHARED = 64
+ final val TERMREFdirect = 65
+ final val TYPEREFdirect = 66
+ final val TERMREFpkg = 67
+ final val TYPEREFpkg = 68
+ final val RECthis = 69
+ final val BYTEconst = 70
+ final val SHORTconst = 71
+ final val CHARconst = 72
+ final val INTconst = 73
+ final val LONGconst = 74
+ final val FLOATconst = 75
+ final val DOUBLEconst = 76
+ final val STRINGconst = 77
+ final val IMPORTED = 78
+ final val RENAMED = 79
+
+ final val THIS = 96
+ final val QUALTHIS = 97
+ final val CLASSconst = 98
+ final val ENUMconst = 99
+ final val BYNAMEtype = 100
+ final val BYNAMEtpt = 101
+ final val NEW = 102
+ final val IMPLICITarg = 103
+ final val PRIVATEqualified = 104
+ final val PROTECTEDqualified = 105
+ final val RECtype = 106
+ final val SINGLETONtpt = 107
+
+ final val IDENT = 112
+ final val IDENTtpt = 113
+ final val SELECT = 114
+ final val SELECTtpt = 115
+ final val TERMREFsymbol = 116
+ final val TERMREF = 117
+ final val TYPEREFsymbol = 118
+ final val TYPEREF = 119
+ final val SELFDEF = 120
+
+ final val PACKAGE = 128
+ final val VALDEF = 129
+ final val DEFDEF = 130
+ final val TYPEDEF = 131
+ final val IMPORT = 132
+ final val TYPEPARAM = 133
+ final val PARAMS = 134
+ final val PARAM = 136
+ final val APPLY = 137
+ final val TYPEAPPLY = 138
+ final val TYPED = 139
+ final val NAMEDARG = 140
+ final val ASSIGN = 141
+ final val BLOCK = 142
+ final val IF = 143
+ final val LAMBDA = 144
+ final val MATCH = 145
+ final val RETURN = 146
+ final val TRY = 147
+ final val INLINED = 148
+ final val REPEATED = 149
+ final val BIND = 150
+ final val ALTERNATIVE = 151
+ final val UNAPPLY = 152
+ final val ANNOTATEDtype = 153
+ final val ANNOTATEDtpt = 154
+ final val CASEDEF = 155
+ final val TEMPLATE = 156
+ final val SUPER = 157
+ final val SUPERtype = 158
+ final val REFINEDtype = 159
+ final val REFINEDtpt = 160
+ final val APPLIEDtype = 161
+ final val APPLIEDtpt = 162
+ final val TYPEBOUNDS = 163
+ final val TYPEBOUNDStpt = 164
+ final val TYPEALIAS = 165
+ final val ANDtype = 166
+ final val ANDtpt = 167
+ final val ORtype = 168
+ final val ORtpt = 169
+ final val METHODtype = 170
+ final val POLYtype = 171
+ final val POLYtpt = 172
+ final val PARAMtype = 173
+ final val ANNOTATION = 174
+
+ final val firstSimpleTreeTag = UNITconst
+ final val firstNatTreeTag = SHARED
+ final val firstASTTreeTag = THIS
+ final val firstNatASTTreeTag = IDENT
+ final val firstLengthTreeTag = PACKAGE
+
+ def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM
+
+ def isModifierTag(tag: Int) = tag match {
+ case PRIVATE
+ | INTERNAL
+ | PROTECTED
+ | ABSTRACT
+ | FINAL
+ | SEALED
+ | CASE
+ | IMPLICIT
+ | LAZY
+ | OVERRIDE
+ | INLINE
+ | STATIC
+ | OBJECT
+ | TRAIT
+ | LOCAL
+ | SYNTHETIC
+ | ARTIFACT
+ | MUTABLE
+ | LABEL
+ | FIELDaccessor
+ | CASEaccessor
+ | COVARIANT
+ | CONTRAVARIANT
+ | SCALA2X
+ | DEFAULTparameterized
+ | INSUPERCALL
+ | STABLE
+ | ANNOTATION
+ | PRIVATEqualified
+ | PROTECTEDqualified => true
+ case _ => false
+ }
+
+ def isTypeTreeTag(tag: Int) = tag match {
+ case IDENTtpt
+ | SELECTtpt
+ | SINGLETONtpt
+ | REFINEDtpt
+ | APPLIEDtpt
+ | POLYtpt
+ | TYPEBOUNDStpt
+ | ANNOTATEDtpt
+ | ANDtpt
+ | ORtpt
+ | BYNAMEtpt => true
+ case _ => false
+ }
+
+ def nameTagToString(tag: Int): String = tag match {
+ case UTF8 => "UTF8"
+ case QUALIFIED => "QUALIFIED"
+ case SIGNED => "SIGNED"
+ case EXPANDED => "EXPANDED"
+ case OBJECTCLASS => "OBJECTCLASS"
+ case SUPERACCESSOR => "SUPERACCESSOR"
+ case DEFAULTGETTER => "DEFAULTGETTER"
+ case SHADOWED => "SHADOWED"
+ }
+
+ def astTagToString(tag: Int): String = tag match {
+ case UNITconst => "UNITconst"
+ case FALSEconst => "FALSEconst"
+ case TRUEconst => "TRUEconst"
+ case NULLconst => "NULLconst"
+ case PRIVATE => "PRIVATE"
+ case INTERNAL => "INTERNAL"
+ case PROTECTED => "PROTECTED"
+ case ABSTRACT => "ABSTRACT"
+ case FINAL => "FINAL"
+ case SEALED => "SEALED"
+ case CASE => "CASE"
+ case IMPLICIT => "IMPLICIT"
+ case LAZY => "LAZY"
+ case OVERRIDE => "OVERRIDE"
+ case INLINE => "INLINE"
+ case STATIC => "STATIC"
+ case OBJECT => "OBJECT"
+ case TRAIT => "TRAIT"
+ case LOCAL => "LOCAL"
+ case SYNTHETIC => "SYNTHETIC"
+ case ARTIFACT => "ARTIFACT"
+ case MUTABLE => "MUTABLE"
+ case LABEL => "LABEL"
+ case FIELDaccessor => "FIELDaccessor"
+ case CASEaccessor => "CASEaccessor"
+ case COVARIANT => "COVARIANT"
+ case CONTRAVARIANT => "CONTRAVARIANT"
+ case SCALA2X => "SCALA2X"
+ case DEFAULTparameterized => "DEFAULTparameterized"
+ case INSUPERCALL => "INSUPERCALL"
+ case STABLE => "STABLE"
+
+ case SHARED => "SHARED"
+ case TERMREFdirect => "TERMREFdirect"
+ case TYPEREFdirect => "TYPEREFdirect"
+ case TERMREFpkg => "TERMREFpkg"
+ case TYPEREFpkg => "TYPEREFpkg"
+ case RECthis => "RECthis"
+ case BYTEconst => "BYTEconst"
+ case SHORTconst => "SHORTconst"
+ case CHARconst => "CHARconst"
+ case INTconst => "INTconst"
+ case LONGconst => "LONGconst"
+ case FLOATconst => "FLOATconst"
+ case DOUBLEconst => "DOUBLEconst"
+ case STRINGconst => "STRINGconst"
+ case RECtype => "RECtype"
+
+ case IDENT => "IDENT"
+ case IDENTtpt => "IDENTtpt"
+ case SELECT => "SELECT"
+ case SELECTtpt => "SELECTtpt"
+ case TERMREFsymbol => "TERMREFsymbol"
+ case TERMREF => "TERMREF"
+ case TYPEREFsymbol => "TYPEREFsymbol"
+ case TYPEREF => "TYPEREF"
+
+ case PACKAGE => "PACKAGE"
+ case VALDEF => "VALDEF"
+ case DEFDEF => "DEFDEF"
+ case TYPEDEF => "TYPEDEF"
+ case IMPORT => "IMPORT"
+ case TYPEPARAM => "TYPEPARAM"
+ case PARAMS => "PARAMS"
+ case PARAM => "PARAM"
+ case IMPORTED => "IMPORTED"
+ case RENAMED => "RENAMED"
+ case APPLY => "APPLY"
+ case TYPEAPPLY => "TYPEAPPLY"
+ case NEW => "NEW"
+ case TYPED => "TYPED"
+ case NAMEDARG => "NAMEDARG"
+ case ASSIGN => "ASSIGN"
+ case BLOCK => "BLOCK"
+ case IF => "IF"
+ case LAMBDA => "LAMBDA"
+ case MATCH => "MATCH"
+ case RETURN => "RETURN"
+ case INLINED => "INLINED"
+ case TRY => "TRY"
+ case REPEATED => "REPEATED"
+ case BIND => "BIND"
+ case ALTERNATIVE => "ALTERNATIVE"
+ case UNAPPLY => "UNAPPLY"
+ case ANNOTATEDtype => "ANNOTATEDtype"
+ case ANNOTATEDtpt => "ANNOTATEDtpt"
+ case CASEDEF => "CASEDEF"
+ case IMPLICITarg => "IMPLICITarg"
+ case TEMPLATE => "TEMPLATE"
+ case SELFDEF => "SELFDEF"
+ case THIS => "THIS"
+ case QUALTHIS => "QUALTHIS"
+ case SUPER => "SUPER"
+ case CLASSconst => "CLASSconst"
+ case ENUMconst => "ENUMconst"
+ case SINGLETONtpt => "SINGLETONtpt"
+ case SUPERtype => "SUPERtype"
+ case REFINEDtype => "REFINEDtype"
+ case REFINEDtpt => "REFINEDtpt"
+ case APPLIEDtype => "APPLIEDtype"
+ case APPLIEDtpt => "APPLIEDtpt"
+ case TYPEBOUNDS => "TYPEBOUNDS"
+ case TYPEBOUNDStpt => "TYPEBOUNDStpt"
+ case TYPEALIAS => "TYPEALIAS"
+ case ANDtype => "ANDtype"
+ case ANDtpt => "ANDtpt"
+ case ORtype => "ORtype"
+ case ORtpt => "ORtpt"
+ case BYNAMEtype => "BYNAMEtype"
+ case BYNAMEtpt => "BYNAMEtpt"
+ case POLYtype => "POLYtype"
+ case POLYtpt => "POLYtpt"
+ case METHODtype => "METHODtype"
+ case PARAMtype => "PARAMtype"
+ case ANNOTATION => "ANNOTATION"
+ case PRIVATEqualified => "PRIVATEqualified"
+ case PROTECTEDqualified => "PROTECTEDqualified"
+ }
+
+ /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry.
+ * If negative, minus the number of leading non-reference trees.
+ */
+ def numRefs(tag: Int) = tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | RETURN | BIND |
+ SELFDEF | REFINEDtype => 1
+ case RENAMED | PARAMtype => 2
+ case POLYtype | METHODtype => -1
+ case _ => 0
+ }
+
+ /** Map between variances and name prefixes */
+ val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+')
+ val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1)
+}
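
The five tag categories described in the format comment are delimited by the `first...TreeTag` constants above; this is how readers such as TastyPrinter and TreeUnpickler decide, from the tag alone, what shape of payload follows. A minimal sketch (the helper `category` is not part of the patch):

    import dotty.tools.dotc.core.tasty.TastyFormat._

    def category(tag: Int): Int =
      if (tag < firstNatTreeTag) 1          // Category 1: tag only
      else if (tag < firstASTTreeTag) 2     // Category 2: tag Nat
      else if (tag < firstNatASTTreeTag) 3  // Category 3: tag AST
      else if (tag < firstLengthTreeTag) 4  // Category 4: tag Nat AST
      else 5                                // Category 5: tag Length <payload>

    assert(category(UNITconst) == 1 && category(TERMREFpkg) == 2)
    assert(category(THIS) == 3 && category(IDENT) == 4 && category(PACKAGE) == 5)
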
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala
new file mode 100644
index 000000000..26807115c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import core.Names.TermName
+import collection.mutable
+
+abstract class TastyName
+
+object TastyName {
+
+ case class NameRef(index: Int) extends AnyVal
+
+ case class Simple(name: TermName) extends TastyName
+ case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName
+ case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
+ case class Expanded(prefix: NameRef, original: NameRef) extends TastyName
+ case class ModuleClass(module: NameRef) extends TastyName
+ case class SuperAccessor(accessed: NameRef) extends TastyName
+ case class DefaultGetter(method: NameRef, num: Int) extends TastyName
+ case class Shadowed(original: NameRef) extends TastyName
+
+ class Table extends (NameRef => TastyName) {
+ private val names = new mutable.ArrayBuffer[TastyName]
+ def add(name: TastyName) = names += name
+ def apply(ref: NameRef) = names(ref.index)
+ def contents: Iterable[TastyName] = names
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
new file mode 100644
index 000000000..c844d522e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -0,0 +1,71 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyFormat._
+import collection.mutable
+import TastyBuffer._
+import java.util.UUID
+import core.Symbols.Symbol
+import ast.tpd
+
+class TastyPickler {
+
+ private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)]
+ val uuid = UUID.randomUUID()
+
+ private val headerBuffer = {
+ val buf = new TastyBuffer(24)
+ for (ch <- header) buf.writeByte(ch.toByte)
+ buf.writeNat(MajorVersion)
+ buf.writeNat(MinorVersion)
+ buf.writeUncompressedLong(uuid.getMostSignificantBits)
+ buf.writeUncompressedLong(uuid.getLeastSignificantBits)
+ buf
+ }
+
+ val nameBuffer = new NameBuffer
+
+ def newSection(name: String, buf: TastyBuffer) =
+ sections += ((nameBuffer.nameIndex(name), buf))
+
+ def assembleParts(): Array[Byte] = {
+ def lengthWithLength(buf: TastyBuffer) = {
+ buf.assemble()
+ buf.length + natSize(buf.length)
+ }
+ val totalSize =
+ headerBuffer.length +
+ lengthWithLength(nameBuffer) + {
+ for ((nameRef, buf) <- sections) yield
+ natSize(nameRef.index) + lengthWithLength(buf)
+ }.sum
+ val all = new TastyBuffer(totalSize)
+ all.writeBytes(headerBuffer.bytes, headerBuffer.length)
+ all.writeNat(nameBuffer.length)
+ all.writeBytes(nameBuffer.bytes, nameBuffer.length)
+ for ((nameRef, buf) <- sections) {
+ all.writeNat(nameRef.index)
+ all.writeNat(buf.length)
+ all.writeBytes(buf.bytes, buf.length)
+ }
+ assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}")
+ all.bytes
+ }
+
+ /** The address in the TASTY file of a given tree, or None if unknown.
+ * Note that trees are looked up by reference equality,
+ * so one can reliably use this function only directly after `pickler`.
+ */
+ var addrOfTree: tpd.Tree => Option[Addr] = (_ => None)
+
+ /**
+ * Addresses in TASTY file of symbols, stored by pickling.
+ * Note that symbols are looked up by reference equality,
+ * so one can reliably use this function only directly after `pickler`.
+ */
+ var addrOfSym: Symbol => Option[Addr] = (_ => None)
+
+ val treePkl = new TreePickler(this)
+}
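
Per `assembleParts`, the assembled blob always starts with the 4 magic bytes, the two version Nats and the 16 UUID bytes, followed by the length-prefixed name table and the sections. An illustrative check of the leading magic number; a fresh pickler with no pickled trees suffices for this sketch, which is not part of the patch:

    val pickler = new TastyPickler
    val assembled = pickler.assembleParts()
    assert((assembled(0) & 0xff) == 0x5C && (assembled(1) & 0xff) == 0xA1)
    assert((assembled(2) & 0xff) == 0xAB && (assembled(3) & 0xff) == 0x1F)
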
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
new file mode 100644
index 000000000..0dc8d8fea
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -0,0 +1,122 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import Contexts._, Decorators._
+import printing.Texts._
+import TastyName._
+import StdNames._
+import TastyUnpickler._
+import TastyBuffer.Addr
+import util.Positions.{Position, offsetToInt}
+import collection.mutable
+
+class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
+
+ val unpickler = new TastyUnpickler(bytes)
+ import unpickler.{tastyName, unpickle}
+
+ def nameToString(name: TastyName): String = name match {
+ case Simple(name) => name.toString
+ case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name)
+ case Signed(original, params, result) =>
+ i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}"
+ case Expanded(prefix, original) => s"$prefix${nme.EXPAND_SEPARATOR}$original"
+ case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS"
+ case SuperAccessor(accessed) => nameRefToString(accessed) + "/SUPERACCESSOR"
+ case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num
+ case Shadowed(original) => nameRefToString(original) + "/SHADOWED"
+ }
+
+ def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref))
+
+ def printNames() =
+ for ((name, idx) <- tastyName.contents.zipWithIndex)
+ println(f"$idx%4d: " + nameToString(name))
+
+ def printContents(): Unit = {
+ println("Names:")
+ printNames()
+ println("Trees:")
+ unpickle(new TreeSectionUnpickler)
+ unpickle(new PositionSectionUnpickler)
+ }
+
+ class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") {
+ import TastyFormat._
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+ import reader._
+ var indent = 0
+ def newLine() = print(f"\n ${index(currentAddr) - index(startAddr)}%5d:" + " " * indent)
+ def printNat() = print(" " + readNat())
+ def printName() = {
+ val idx = readNat()
+ print(" ") ;print(idx); print("["); print(nameRefToString(NameRef(idx))); print("]")
+ }
+ def printTree(): Unit = {
+ newLine()
+ val tag = readByte()
+ print(" ");print(astTagToString(tag))
+ indent += 2
+ if (tag >= firstLengthTreeTag) {
+ val len = readNat()
+ print(s"($len)")
+ val end = currentAddr + len
+ def printTrees() = until(end)(printTree())
+ tag match {
+ case RENAMED =>
+ printName(); printName()
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
+ printName(); printTrees()
+ case REFINEDtype =>
+ printName(); printTree(); printTrees()
+ case RETURN =>
+ printNat(); printTrees()
+ case METHODtype | POLYtype =>
+ printTree()
+ until(end) { printName(); printTree() }
+ case PARAMtype =>
+ printNat(); printNat()
+ case _ =>
+ printTrees()
+ }
+ if (currentAddr != end) {
+ println(s"incomplete read, current = $currentAddr, end = $end")
+ goto(end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) {
+ tag match {
+ case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName()
+ case _ => printNat()
+ }
+ printTree()
+ }
+ else if (tag >= firstASTTreeTag)
+ printTree()
+ else if (tag >= firstNatTreeTag)
+ tag match {
+ case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName()
+ case _ => printNat()
+ }
+ indent -= 2
+ }
+ println(i"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr")
+ println(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr")
+ while (!isAtEnd) {
+ printTree()
+ newLine()
+ }
+ }
+ }
+
+ class PositionSectionUnpickler extends SectionUnpickler[Unit]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+ print(s"${reader.endAddr.index - reader.currentAddr.index}")
+ val positions = new PositionUnpickler(reader).positions
+ println(s" position bytes:")
+ val sorted = positions.toSeq.sortBy(_._1.index)
+ for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}")
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala
new file mode 100644
index 000000000..e583c4793
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala
@@ -0,0 +1,141 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyBuffer._
+import TastyName.NameRef
+import collection.mutable
+
+/** A reader over a byte array containing data in TASTY format. It supports reading bytes,
+ * natural numbers, and addresses represented as natural numbers.
+ *
+ * @param bytes The array containing data
+ * @param start The position from which to read
+ * @param end The position one greater than the last byte to be read
+ * @param base The index referenced by the logical zero address Addr(0)
+ */
+class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) {
+
+ def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length)
+
+ private var bp: Int = start
+
+ def addr(idx: Int) = Addr(idx - base)
+ def index(addr: Addr) = addr.index + base
+
+ /** The address of the first byte to read, or of the first byte that was read */
+ def startAddr: Addr = addr(start)
+
+ /** The address of the next byte to read */
+ def currentAddr: Addr = addr(bp)
+
+ /** The address one greater than the last byte to read */
+ def endAddr: Addr = addr(end)
+
+ /** Have all bytes been read? */
+ def isAtEnd: Boolean = bp == end
+
+ /** A new reader over the same array with the same address base, but with
+ * specified start and end positions
+ */
+ def subReader(start: Addr, end: Addr): TastyReader =
+ new TastyReader(bytes, index(start), index(end), base)
+
+ /** Read a byte of data. */
+ def readByte(): Int = {
+ val result = bytes(bp) & 0xff
+ bp += 1
+ result
+ }
+
+ /** Returns the next byte of data as a natural number without advancing the read position */
+ def nextByte: Int = bytes(bp) & 0xff
+
+ /** Read the next `n` bytes of `data`. */
+ def readBytes(n: Int): Array[Byte] = {
+ val result = new Array[Byte](n)
+ Array.copy(bytes, bp, result, 0, n)
+ bp += n
+ result
+ }
+
+ /** Read a natural number fitting in an Int in big endian format, base 128.
+ * The last digit has bit 0x80 set.
+ */
+ def readNat(): Int = readLongNat.toInt
+
+ /** Read an integer number in 2's complement big endian format, base 128.
+ * The last digit has bit 0x80 set.
+ */
+ def readInt(): Int = readLongInt.toInt
+
+ /** Read a natural number fitting in a Long in big endian format, base 128.
+ * The last digit has bit 0x80 set.
+ */
+ def readLongNat(): Long = {
+ var b = 0L
+ var x = 0L
+ do {
+ b = bytes(bp)
+ x = (x << 7) | (b & 0x7f)
+ bp += 1
+ } while ((b & 0x80) == 0)
+ x
+ }
+
+ /** Read a long integer number in 2's complement big endian format, base 128. */
+ def readLongInt(): Long = {
+ var b = bytes(bp)
+ var x: Long = (b << 1).toByte >> 1 // sign extend with bit 6.
+ bp += 1
+ while ((b & 0x80) == 0) {
+ b = bytes(bp)
+ x = (x << 7) | (b & 0x7f)
+ bp += 1
+ }
+ x
+ }
+
+ /** Read an uncompressed Long stored in 8 bytes in big endian format */
+ def readUncompressedLong(): Long = {
+ var x: Long = 0
+ for (i <- 0 to 7)
+ x = (x << 8) | (readByte() & 0xff)
+ x
+ }
+
+ /** Read a natural number and return as a NameRef */
+ def readNameRef() = NameRef(readNat())
+
+ /** Read a natural number and return as an address */
+ def readAddr() = Addr(readNat())
+
+ /** Read a length number and return the absolute end address implied by it,
+ * given as <address following length field> + <length-value-read>.
+ */
+ def readEnd(): Addr = addr(readNat() + bp)
+
+ /** Set read position to the one pointed to by `addr` */
+ def goto(addr: Addr): Unit =
+ bp = index(addr)
+
+ /** Perform `op` until `end` address is reached and collect results in a list. */
+ def until[T](end: Addr)(op: => T): List[T] = {
+ val buf = new mutable.ListBuffer[T]
+ while (bp < index(end)) buf += op
+ assert(bp == index(end))
+ buf.toList
+ }
+
+ /** If before given `end` address, the result of `op`, otherwise `default` */
+ def ifBefore[T](end: Addr)(op: => T, default: T): T =
+ if (bp < index(end)) op else default
+
+ /** Perform `op` while condition `cond` holds and collect results in a list. */
+ def collectWhile[T](cond: => Boolean)(op: => T): List[T] = {
+ val buf = new mutable.ListBuffer[T]
+ while (cond) buf += op
+ buf.toList
+ }
+}
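
The sign handling in `writeLongInt`/`readLongInt` hinges on bit 6 of the first digit: -1 fits in a single stop digit, while +64 needs a leading zero digit so that its bit 6 is not mistaken for a sign. A round-trip sketch, not part of the patch:

    val buf = new TastyBuffer(8)
    buf.writeInt(-1)   // encoded as the single byte 0xFF
    buf.writeInt(64)   // encoded as 0x00 0xC0: a zero digit, then the stop digit
    val r = new TastyReader(buf.bytes, 0, buf.length)
    assert(r.readInt() == -1)
    assert(r.readInt() == 64)
    assert(buf.length == 3)
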
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
new file mode 100644
index 000000000..8a1f58acd
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
@@ -0,0 +1,95 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import scala.collection.mutable
+import TastyFormat._
+import Names.{Name, termName}
+import java.util.UUID
+
+object TastyUnpickler {
+ class UnpickleException(msg: String) extends Exception(msg)
+
+ abstract class SectionUnpickler[R](val name: String) {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): R
+ }
+}
+
+import TastyUnpickler._
+
+class TastyUnpickler(reader: TastyReader) {
+ import reader._
+
+ def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
+
+ private val sectionReader = new mutable.HashMap[String, TastyReader]
+ val tastyName = new TastyName.Table
+
+ def check(cond: Boolean, msg: => String) =
+ if (!cond) throw new UnpickleException(msg)
+
+ def readString(): String = {
+ val TastyName.Simple(name) = tastyName(readNameRef())
+ name.toString
+ }
+
+ def readName(): TastyName = {
+ import TastyName._
+ val tag = readByte()
+ val length = readNat()
+ val start = currentAddr
+ val end = start + length
+ val result = tag match {
+ case UTF8 =>
+ goto(end)
+ Simple(termName(bytes, start.index, length))
+ case QUALIFIED =>
+ Qualified(readNameRef(), readNameRef())
+ case SIGNED =>
+ val original = readNameRef()
+ val result = readNameRef()
+ val params = until(end)(readNameRef())
+ Signed(original, params, result)
+ case EXPANDED =>
+ Expanded(readNameRef(), readNameRef())
+ case OBJECTCLASS =>
+ ModuleClass(readNameRef())
+ case SUPERACCESSOR =>
+ SuperAccessor(readNameRef())
+ case DEFAULTGETTER =>
+ DefaultGetter(readNameRef(), readNat())
+ case SHADOWED =>
+ Shadowed(readNameRef())
+ }
+ assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
+ result
+ }
+
+ private def readHeader(): UUID = {
+ for (i <- 0 until header.length)
+ check(readByte() == header(i), "not a TASTy file")
+ val major = readNat()
+ val minor = readNat()
+ check(major == MajorVersion && minor <= MinorVersion,
+ s"""TASTy signature has wrong version.
+ | expected: $MajorVersion.$MinorVersion
+ | found : $major.$minor""".stripMargin)
+ new UUID(readUncompressedLong(), readUncompressedLong())
+ }
+
+ val uuid = readHeader()
+
+ locally {
+ until(readEnd()) { tastyName.add(readName()) }
+ while (!isAtEnd) {
+ val secName = readString()
+ val secEnd = readEnd()
+ sectionReader(secName) = new TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index)
+ goto(secEnd)
+ }
+ }
+
+ def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
+ for (reader <- sectionReader.get(sec.name)) yield
+ sec.unpickle(reader, tastyName)
+}
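
SectionUnpickler is the extension point used by DottyUnpickler and TastyPrinter above: each named section gets its own reader, positioned at the section's start. A sketch of a custom one that merely reports a section's size in bytes; the class name is made up, and `bytes` is an assumed Array[Byte] holding a complete TASTY file:

    import dotty.tools.dotc.core.tasty._
    import dotty.tools.dotc.core.tasty.TastyUnpickler._

    class SectionSizeUnpickler(sectionName: String) extends SectionUnpickler[Int](sectionName) {
      def unpickle(reader: TastyReader, tastyName: TastyName.Table): Int =
        reader.endAddr.index - reader.startAddr.index
    }

    // None if the file has no such section.
    val astSize: Option[Int] = new TastyUnpickler(bytes).unpickle(new SectionSizeUnpickler("ASTs"))
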
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
new file mode 100644
index 000000000..6c7982d78
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.{bestFit, dble}
+import TastyBuffer.{Addr, AddrWidth}
+import config.Printers.pickling
+import ast.untpd.Tree
+
+class TreeBuffer extends TastyBuffer(50000) {
+
+ private final val ItemsOverOffsets = 2
+ private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
+ private var offsets = new Array[Int](initialOffsetSize)
+ private var isRelative = new Array[Boolean](initialOffsetSize)
+ private var delta: Array[Int] = _
+ private var numOffsets = 0
+
+ /** A map from trees to the address at which a tree is pickled. */
+ private val treeAddrs = new java.util.IdentityHashMap[Tree, Any] // really: Addr | Null
+
+ def registerTreeAddr(tree: Tree): Addr = treeAddrs.get(tree) match {
+ case null => treeAddrs.put(tree, currentAddr); currentAddr
+ case addr: Addr => addr
+ }
+
+ def addrOfTree(tree: Tree): Option[Addr] = treeAddrs.get(tree) match {
+ case null => None
+ case addr: Addr => Some(addr)
+ }
+
+ private def offset(i: Int): Addr = Addr(offsets(i))
+
+ private def keepOffset(relative: Boolean): Unit = {
+ if (numOffsets == offsets.length) {
+ offsets = dble(offsets)
+ isRelative = dble(isRelative)
+ }
+ offsets(numOffsets) = length
+ isRelative(numOffsets) = relative
+ numOffsets += 1
+ }
+
+ /** Reserve space for a reference, to be adjusted later */
+ def reserveRef(relative: Boolean): Addr = {
+ val addr = currentAddr
+ keepOffset(relative)
+ reserveAddr()
+ addr
+ }
+
+ /** Write reference right adjusted into freshly reserved field. */
+ def writeRef(target: Addr) = {
+ keepOffset(relative = false)
+ fillAddr(reserveAddr(), target)
+ }
+
+ /** Fill previously reserved field with a reference */
+ def fillRef(at: Addr, target: Addr, relative: Boolean) = {
+ val addr = if (relative) target.relativeTo(at) else target
+ fillAddr(at, addr)
+ }
+
+ /** The amount by which the bytes at the given address are shifted under compression */
+ def deltaAt(at: Addr): Int = {
+ val idx = bestFit(offsets, numOffsets, at.index - 1)
+ if (idx < 0) 0 else delta(idx)
+ }
+
+ /** The address to which `x` is translated under compression */
+ def adjusted(x: Addr): Addr = x - deltaAt(x)
+
+ /** Compute all shift-deltas */
+ private def computeDeltas() = {
+ delta = new Array[Int](numOffsets)
+ var lastDelta = 0
+ var i = 0
+ while (i < numOffsets) {
+ val off = offset(i)
+ val skippedOff = skipZeroes(off)
+ val skippedCount = skippedOff.index - off.index
+ assert(skippedCount < AddrWidth, s"unset field at position $off")
+ lastDelta += skippedCount
+ delta(i) = lastDelta
+ i += 1
+ }
+ }
+
+ /** The absolute or relative adjusted address at index `i` of the `offsets` array */
+ private def adjustedOffset(i: Int): Addr = {
+ val at = offset(i)
+ val original = getAddr(at)
+ if (isRelative(i)) {
+ val start = skipNat(at)
+ val len1 = original + delta(i) - deltaAt(original + start.index)
+ val len2 = adjusted(original + start.index) - adjusted(start).index
+ assert(len1 == len2,
+ s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2")
+ len1
+ } else adjusted(original)
+ }
+
+ /** Adjust all offsets according to previously computed deltas */
+ private def adjustOffsets(): Unit = {
+ for (i <- 0 until numOffsets) {
+ val corrected = adjustedOffset(i)
+ fillAddr(offset(i), corrected)
+ }
+ }
+
+ /** Adjust deltas to also take into account references that will shrink (and thereby
+ * generate additional zeroes that can be skipped) due to previously
+ * computed adjustments.
+ */
+ private def adjustDeltas(): Int = {
+ val delta1 = new Array[Int](delta.length)
+ var lastDelta = 0
+ var i = 0
+ while (i < numOffsets) {
+ val corrected = adjustedOffset(i)
+ lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index)
+ delta1(i) = lastDelta
+ i += 1
+ }
+ val saved =
+ if (numOffsets == 0) 0
+ else delta1(numOffsets - 1) - delta(numOffsets - 1)
+ delta = delta1
+ saved
+ }
+
+ /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */
+ private def compress(): Int = {
+ var lastDelta = 0
+ var start = 0
+ var i = 0
+ var wasted = 0
+ def shift(end: Int) =
+ Array.copy(bytes, start, bytes, start - lastDelta, end - start)
+ while (i < numOffsets) {
+ val next = offsets(i)
+ shift(next)
+ start = next + delta(i) - lastDelta
+ val pastZeroes = skipZeroes(Addr(next)).index
+ assert(pastZeroes >= start, s"something's wrong: eliminated non-zero")
+ wasted += (pastZeroes - start)
+ lastDelta = delta(i)
+ i += 1
+ }
+ shift(length)
+ length -= lastDelta
+ wasted
+ }
+
+ def adjustTreeAddrs(): Unit = {
+ val it = treeAddrs.keySet.iterator
+ while (it.hasNext) {
+ val tree = it.next
+ treeAddrs.get(tree) match {
+ case addr: Addr => treeAddrs.put(tree, adjusted(addr))
+ case addrs: List[Addr] => treeAddrs.put(tree, addrs.map(adjusted))
+ }
+ }
+ }
+
+ /** Final assembly, involving the following steps:
+ * - compute deltas
+ * - adjust deltas until additional savings are < 1% of total
+ * - adjust offsets according to the adjusted deltas
+ * - shrink buffer, skipping zeroes.
+ */
+ def compactify(): Unit = {
+ val origLength = length
+ computeDeltas()
+ //println(s"offsets: ${offsets.take(numOffsets).deep}")
+ //println(s"deltas: ${delta.take(numOffsets).deep}")
+ var saved = 0
+ do {
+ saved = adjustDeltas()
+ pickling.println(s"adjusting deltas, saved = $saved")
+ } while (saved > 0 && length / saved < 100)
+ adjustOffsets()
+ adjustTreeAddrs()
+ val wasted = compress()
+ pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now.
+ }
+}
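
What `compactify` buys can be seen on a tiny buffer: `reserveRef` always sets aside `AddrWidth` (4) bytes, but a small length or reference needs only one digit, so the leading zero bytes are squeezed out at the end. A minimal sketch, not part of the patch:

    val buf = new TreeBuffer
    val lengthAddr = buf.reserveRef(relative = true)  // 4 zero bytes reserved
    buf.writeByte(42)                                 // a 1-byte payload
    buf.fillRef(lengthAddr, buf.currentAddr, relative = true)
    assert(buf.length == 5)    // 4-byte length field + 1 payload byte
    buf.compactify()
    assert(buf.length == 2)    // 1-byte length field + 1 payload byte
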
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
new file mode 100644
index 000000000..80270aa25
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -0,0 +1,641 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast.Trees._
+import ast.untpd
+import TastyFormat._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._
+import collection.mutable
+import typer.Inliner
+import NameOps._
+import StdNames.nme
+import TastyBuffer._
+import TypeApplications._
+
+class TreePickler(pickler: TastyPickler) {
+ val buf = new TreeBuffer
+ pickler.newSection("ASTs", buf)
+ import buf._
+ import pickler.nameBuffer.{nameIndex, fullNameIndex}
+ import ast.tpd._
+
+ private val symRefs = new mutable.HashMap[Symbol, Addr]
+ private val forwardSymRefs = new mutable.HashMap[Symbol, List[Addr]]
+ private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null
+
+ private def withLength(op: => Unit) = {
+ val lengthAddr = reserveRef(relative = true)
+ op
+ fillRef(lengthAddr, currentAddr, relative = true)
+ }
+
+ def addrOfSym(sym: Symbol): Option[Addr] = {
+ symRefs.get(sym)
+ }
+
+ def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case tree: MemberDef =>
+ if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr
+ case _ =>
+ }
+
+ def registerDef(sym: Symbol): Unit = {
+ symRefs(sym) = currentAddr
+ forwardSymRefs.get(sym) match {
+ case Some(refs) =>
+ refs.foreach(fillRef(_, currentAddr, relative = false))
+ forwardSymRefs -= sym
+ case None =>
+ }
+ }
+
+ private def pickleName(name: Name): Unit = writeNat(nameIndex(name).index)
+ private def pickleName(name: TastyName): Unit = writeNat(nameIndex(name).index)
+ private def pickleNameAndSig(name: Name, sig: Signature) = {
+ val Signature(params, result) = sig
+ pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result)))
+ }
+
+ private def pickleName(sym: Symbol)(implicit ctx: Context): Unit = {
+ def encodeSuper(name: Name): TastyName.NameRef =
+ if (sym is Flags.SuperAccessor) {
+ val SuperAccessorName(n) = name
+ nameIndex(TastyName.SuperAccessor(nameIndex(n)))
+ }
+ else nameIndex(name)
+ val nameRef =
+ if (sym is Flags.ExpandedName)
+ nameIndex(
+ TastyName.Expanded(
+ nameIndex(sym.name.expandedPrefix),
+ encodeSuper(sym.name.unexpandedName)))
+ else encodeSuper(sym.name)
+ writeNat(nameRef.index)
+ }
+
+ private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+ case Some(label) =>
+ if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
+ case None =>
+ // See pos/t1957.scala for an example where this can happen.
+ // I believe it's a bug in typer: the type of an implicit argument refers
+ // to a closure parameter outside the closure itself. TODO: track this down, so that we
+ // can eliminate this case.
+ ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
+ pickleForwardSymRef(sym)
+ }
+
+ private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = {
+ val ref = reserveRef(relative = false)
+ assert(!sym.is(Flags.Package), sym)
+ forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
+ }
+
+ private def isLocallyDefined(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+ case Some(label) => assert(sym.exists); label != NoAddr
+ case None => false
+ }
+
+ def pickleConstant(c: Constant)(implicit ctx: Context): Unit = c.tag match {
+ case UnitTag =>
+ writeByte(UNITconst)
+ case BooleanTag =>
+ writeByte(if (c.booleanValue) TRUEconst else FALSEconst)
+ case ByteTag =>
+ writeByte(BYTEconst)
+ writeInt(c.byteValue)
+ case ShortTag =>
+ writeByte(SHORTconst)
+ writeInt(c.shortValue)
+ case CharTag =>
+ writeByte(CHARconst)
+ writeNat(c.charValue)
+ case IntTag =>
+ writeByte(INTconst)
+ writeInt(c.intValue)
+ case LongTag =>
+ writeByte(LONGconst)
+ writeLongInt(c.longValue)
+ case FloatTag =>
+ writeByte(FLOATconst)
+ writeInt(java.lang.Float.floatToRawIntBits(c.floatValue))
+ case DoubleTag =>
+ writeByte(DOUBLEconst)
+ writeLongInt(java.lang.Double.doubleToRawLongBits(c.doubleValue))
+ case StringTag =>
+ writeByte(STRINGconst)
+ writeNat(nameIndex(c.stringValue).index)
+ case NullTag =>
+ writeByte(NULLconst)
+ case ClazzTag =>
+ writeByte(CLASSconst)
+ pickleType(c.typeValue)
+ case EnumTag =>
+ writeByte(ENUMconst)
+ pickleType(c.symbolValue.termRef)
+ }
+
+ def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = try {
+ val tpe = tpe0.stripTypeVar
+ val prev = pickledTypes.get(tpe)
+ if (prev == null) {
+ pickledTypes.put(tpe, currentAddr)
+ pickleNewType(tpe, richTypes)
+ }
+ else {
+ writeByte(SHARED)
+ writeRef(prev.asInstanceOf[Addr])
+ }
+ } catch {
+ case ex: AssertionError =>
+ println(i"error when pickling type $tpe0")
+ throw ex
+ }
+
+ private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = try { tpe match {
+ case AppliedType(tycon, args) =>
+ writeByte(APPLIEDtype)
+ withLength { pickleType(tycon); args.foreach(pickleType(_)) }
+ case ConstantType(value) =>
+ pickleConstant(value)
+ case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
+ pickleType(tpe.superType)
+ case tpe: WithFixedSym =>
+ val sym = tpe.symbol
+ def pickleRef() =
+ if (tpe.prefix == NoPrefix) {
+ writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
+ pickleSymRef(sym)
+ }
+ else {
+ assert(tpe.symbol.isClass)
+ assert(tpe.symbol.is(Flags.Scala2x), tpe.symbol.showLocated)
+ writeByte(TYPEREF) // should be changed to a new entry that keeps track of prefix, symbol & owner
+ pickleName(tpe.name)
+ pickleType(tpe.prefix)
+ }
+ if (sym.is(Flags.Package)) {
+ writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
+ pickleName(qualifiedName(sym))
+ }
+ else if (sym is Flags.BindDefinedType) {
+ registerDef(sym)
+ writeByte(BIND)
+ withLength {
+ pickleName(sym.name)
+ pickleType(sym.info)
+ pickleRef()
+ }
+ }
+ else pickleRef()
+ case tpe: TermRefWithSignature =>
+ if (tpe.symbol.is(Flags.Package)) picklePackageRef(tpe.symbol)
+ else {
+ writeByte(TERMREF)
+ pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix)
+ }
+ case tpe: NamedType =>
+ if (isLocallyDefined(tpe.symbol)) {
+ writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol)
+ pickleSymRef(tpe.symbol); pickleType(tpe.prefix)
+ } else {
+ writeByte(if (tpe.isType) TYPEREF else TERMREF)
+ pickleName(tpe.name); pickleType(tpe.prefix)
+ }
+ case tpe: ThisType =>
+ if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot)
+ picklePackageRef(tpe.cls)
+ else {
+ writeByte(THIS)
+ pickleType(tpe.tref)
+ }
+ case tpe: SuperType =>
+ writeByte(SUPERtype)
+ withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)}
+ case tpe: RecThis =>
+ writeByte(RECthis)
+ val binderAddr = pickledTypes.get(tpe.binder)
+ assert(binderAddr != null, tpe.binder)
+ writeRef(binderAddr.asInstanceOf[Addr])
+ case tpe: SkolemType =>
+ pickleType(tpe.info)
+ case tpe: RefinedType =>
+ writeByte(REFINEDtype)
+ withLength {
+ pickleName(tpe.refinedName)
+ pickleType(tpe.parent)
+ pickleType(tpe.refinedInfo, richTypes = true)
+ }
+ case tpe: RecType =>
+ writeByte(RECtype)
+ pickleType(tpe.parent)
+ case tpe: TypeAlias =>
+ writeByte(TYPEALIAS)
+ withLength {
+ pickleType(tpe.alias, richTypes)
+ tpe.variance match {
+ case 1 => writeByte(COVARIANT)
+ case -1 => writeByte(CONTRAVARIANT)
+ case 0 =>
+ }
+ }
+ case tpe: TypeBounds =>
+ writeByte(TYPEBOUNDS)
+ withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) }
+ case tpe: AnnotatedType =>
+ writeByte(ANNOTATEDtype)
+ withLength { pickleType(tpe.tpe, richTypes); pickleTree(tpe.annot.tree) }
+ case tpe: AndOrType =>
+ writeByte(if (tpe.isAnd) ANDtype else ORtype)
+ withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) }
+ case tpe: ExprType =>
+ writeByte(BYNAMEtype)
+ pickleType(tpe.underlying)
+ case tpe: PolyType =>
+ writeByte(POLYtype)
+ val paramNames = tpe.typeParams.map(tparam =>
+ varianceToPrefix(tparam.paramVariance) +: tparam.paramName)
+ pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds)
+ case tpe: MethodType if richTypes =>
+ writeByte(METHODtype)
+ pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes)
+ case tpe: PolyParam =>
+ if (!pickleParamType(tpe))
+ // TODO figure out why this case arises in e.g. pickling AbstractFileReader.
+ ctx.typerState.constraint.entry(tpe) match {
+ case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes)
+ case _ => assert(false, s"orphan poly parameter: $tpe")
+ }
+ case tpe: MethodParam =>
+ assert(pickleParamType(tpe), s"orphan method parameter: $tpe")
+ case tpe: LazyRef =>
+ pickleType(tpe.ref)
+ }} catch {
+ case ex: AssertionError =>
+ println(i"error while pickling type $tpe")
+ throw ex
+ }
+
+ def picklePackageRef(pkg: Symbol)(implicit ctx: Context): Unit = {
+ writeByte(TERMREFpkg)
+ pickleName(qualifiedName(pkg))
+ }
+
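+  /** Pickle a method-like type: first the result type, then the parameter names and
+   *  parameter types/bounds interleaved, matching `readParamNames`/`readParamTypes`
+   *  on the unpickling side.
+   */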
+ def pickleMethodic(result: Type, names: List[Name], types: List[Type])(implicit ctx: Context) =
+ withLength {
+ pickleType(result, richTypes = true)
+ (names, types).zipped.foreach { (name, tpe) =>
+ pickleName(name); pickleType(tpe)
+ }
+ }
+
+ def pickleParamType(tpe: ParamType)(implicit ctx: Context): Boolean = {
+ val binder = pickledTypes.get(tpe.binder)
+ val pickled = binder != null
+ if (pickled) {
+ writeByte(PARAMtype)
+ withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) }
+ }
+ pickled
+ }
+
+ def pickleTpt(tpt: Tree)(implicit ctx: Context): Unit =
+ pickleTree(tpt)
+
+ def pickleTreeUnlessEmpty(tree: Tree)(implicit ctx: Context): Unit =
+ if (!tree.isEmpty) pickleTree(tree)
+
+ def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(implicit ctx: Context) = {
+ assert(symRefs(sym) == NoAddr, sym)
+ registerDef(sym)
+ writeByte(tag)
+ withLength {
+ pickleName(sym)
+ pickleParams
+ tpt match {
+ case templ: Template => pickleTree(tpt)
+ case _ if tpt.isType => pickleTpt(tpt)
+ }
+ pickleTreeUnlessEmpty(rhs)
+ pickleModifiers(sym)
+ }
+ }
+
+ def pickleParam(tree: Tree)(implicit ctx: Context): Unit = {
+ registerTreeAddr(tree)
+ tree match {
+ case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt)
+ case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs)
+ case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
+ }
+ }
+
+ def pickleParams(trees: List[Tree])(implicit ctx: Context): Unit = {
+ trees.foreach(preRegister)
+ trees.foreach(pickleParam)
+ }
+
+ def pickleStats(stats: List[Tree])(implicit ctx: Context) = {
+ stats.foreach(preRegister)
+ stats.foreach(stat => if (!stat.isEmpty) pickleTree(stat))
+ }
+
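+  /** Pickle a tree. If the tree was already pickled at an earlier address (as recorded
+   *  by `registerTreeAddr`), emit a SHARED reference back to that address instead of
+   *  pickling the tree again.
+   */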
+ def pickleTree(tree: Tree)(implicit ctx: Context): Unit = {
+ val addr = registerTreeAddr(tree)
+ if (addr != currentAddr) {
+ writeByte(SHARED)
+ writeRef(addr)
+ }
+ else
+ try tree match {
+ case Ident(name) =>
+ tree.tpe match {
+ case tp: TermRef if name != nme.WILDCARD =>
+ // wildcards are pattern bound, need to be preserved as ids.
+ pickleType(tp)
+ case _ =>
+ writeByte(if (tree.isType) IDENTtpt else IDENT)
+ pickleName(name)
+ pickleType(tree.tpe)
+ }
+ case This(qual) =>
+ if (qual.isEmpty) pickleType(tree.tpe)
+ else {
+ writeByte(QUALTHIS)
+ val ThisType(tref) = tree.tpe
+ pickleTree(qual.withType(tref))
+ }
+ case Select(qual, name) =>
+ writeByte(if (name.isTypeName) SELECTtpt else SELECT)
+ val realName = tree.tpe match {
+ case tp: NamedType if tp.name.isShadowedName => tp.name
+ case _ => name
+ }
+ val sig = tree.tpe.signature
+ if (sig == Signature.NotAMethod) pickleName(realName)
+ else pickleNameAndSig(realName, sig)
+ pickleTree(qual)
+ case Apply(fun, args) =>
+ writeByte(APPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTree)
+ }
+ case TypeApply(fun, args) =>
+ writeByte(TYPEAPPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTpt)
+ }
+ case Literal(const1) =>
+ pickleConstant {
+ tree.tpe match {
+ case ConstantType(const2) => const2
+ case _ => const1
+ }
+ }
+ case Super(qual, mix) =>
+ writeByte(SUPER)
+ withLength {
+ pickleTree(qual);
+ if (!mix.isEmpty) {
+ val SuperType(_, mixinType: TypeRef) = tree.tpe
+ pickleTree(mix.withType(mixinType))
+ }
+ }
+ case New(tpt) =>
+ writeByte(NEW)
+ pickleTpt(tpt)
+ case Typed(expr, tpt) =>
+ writeByte(TYPED)
+ withLength { pickleTree(expr); pickleTpt(tpt) }
+ case NamedArg(name, arg) =>
+ writeByte(NAMEDARG)
+ withLength { pickleName(name); pickleTree(arg) }
+ case Assign(lhs, rhs) =>
+ writeByte(ASSIGN)
+ withLength { pickleTree(lhs); pickleTree(rhs) }
+ case Block(stats, expr) =>
+ writeByte(BLOCK)
+ stats.foreach(preRegister)
+ withLength { pickleTree(expr); stats.foreach(pickleTree) }
+ case If(cond, thenp, elsep) =>
+ writeByte(IF)
+ withLength { pickleTree(cond); pickleTree(thenp); pickleTree(elsep) }
+ case Closure(env, meth, tpt) =>
+ writeByte(LAMBDA)
+ assert(env.isEmpty)
+ withLength {
+ pickleTree(meth)
+ if (tpt.tpe.exists) pickleTpt(tpt)
+ }
+ case Match(selector, cases) =>
+ writeByte(MATCH)
+ withLength { pickleTree(selector); cases.foreach(pickleTree) }
+ case CaseDef(pat, guard, rhs) =>
+ writeByte(CASEDEF)
+ withLength { pickleTree(pat); pickleTree(rhs); pickleTreeUnlessEmpty(guard) }
+ case Return(expr, from) =>
+ writeByte(RETURN)
+ withLength { pickleSymRef(from.symbol); pickleTreeUnlessEmpty(expr) }
+ case Try(block, cases, finalizer) =>
+ writeByte(TRY)
+ withLength { pickleTree(block); cases.foreach(pickleTree); pickleTreeUnlessEmpty(finalizer) }
+ case SeqLiteral(elems, elemtpt) =>
+ writeByte(REPEATED)
+ withLength { pickleTree(elemtpt); elems.foreach(pickleTree) }
+ case Inlined(call, bindings, expansion) =>
+ writeByte(INLINED)
+ bindings.foreach(preRegister)
+ withLength { pickleTree(call); pickleTree(expansion); bindings.foreach(pickleTree) }
+ case Bind(name, body) =>
+ registerDef(tree.symbol)
+ writeByte(BIND)
+ withLength { pickleName(name); pickleType(tree.symbol.info); pickleTree(body) }
+ case Alternative(alts) =>
+ writeByte(ALTERNATIVE)
+ withLength { alts.foreach(pickleTree) }
+ case UnApply(fun, implicits, patterns) =>
+ writeByte(UNAPPLY)
+ withLength {
+ pickleTree(fun)
+ for (implicitArg <- implicits) {
+ writeByte(IMPLICITarg)
+ pickleTree(implicitArg)
+ }
+ pickleType(tree.tpe)
+ patterns.foreach(pickleTree)
+ }
+ case tree: ValDef =>
+ pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs)
+ case tree: DefDef =>
+ def pickleAllParams = {
+ pickleParams(tree.tparams)
+ for (vparams <- tree.vparamss) {
+ writeByte(PARAMS)
+ withLength { pickleParams(vparams) }
+ }
+ }
+ pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams)
+ case tree: TypeDef =>
+ pickleDef(TYPEDEF, tree.symbol, tree.rhs)
+ case tree: Template =>
+ registerDef(tree.symbol)
+ writeByte(TEMPLATE)
+ val (params, rest) = tree.body partition {
+ case stat: TypeDef => stat.symbol is Flags.Param
+ case stat: ValOrDefDef =>
+ stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter
+ case _ => false
+ }
+ withLength {
+ pickleParams(params)
+ tree.parents.foreach(pickleTree)
+ val cinfo @ ClassInfo(_, _, _, _, selfInfo) = tree.symbol.owner.info
+ if ((selfInfo ne NoType) || !tree.self.isEmpty) {
+ writeByte(SELFDEF)
+ pickleName(tree.self.name)
+
+ if (!tree.self.tpt.isEmpty) pickleTree(tree.self.tpt)
+ else {
+ if (!tree.self.isEmpty) registerTreeAddr(tree.self)
+ pickleType {
+ cinfo.selfInfo match {
+ case sym: Symbol => sym.info
+ case tp: Type => tp
+ }
+ }
+ }
+ }
+ pickleStats(tree.constr :: rest)
+ }
+ case Import(expr, selectors) =>
+ writeByte(IMPORT)
+ withLength {
+ pickleTree(expr)
+ selectors foreach {
+ case Thicket((from @ Ident(_)) :: (to @ Ident(_)) :: Nil) =>
+ pickleSelector(IMPORTED, from)
+ pickleSelector(RENAMED, to)
+ case id @ Ident(_) =>
+ pickleSelector(IMPORTED, id)
+ }
+ }
+ case PackageDef(pid, stats) =>
+ writeByte(PACKAGE)
+ withLength { pickleType(pid.tpe); pickleStats(stats) }
+ case tree: TypeTree =>
+ pickleType(tree.tpe)
+ case SingletonTypeTree(ref) =>
+ writeByte(SINGLETONtpt)
+ pickleTree(ref)
+ case RefinedTypeTree(parent, refinements) =>
+ if (refinements.isEmpty) pickleTree(parent)
+ else {
+ val refineCls = refinements.head.symbol.owner.asClass
+ pickledTypes.put(refineCls.typeRef, currentAddr)
+ writeByte(REFINEDtpt)
+ refinements.foreach(preRegister)
+ withLength { pickleTree(parent); refinements.foreach(pickleTree) }
+ }
+ case AppliedTypeTree(tycon, args) =>
+ writeByte(APPLIEDtpt)
+ withLength { pickleTree(tycon); args.foreach(pickleTree) }
+ case AndTypeTree(tp1, tp2) =>
+ writeByte(ANDtpt)
+ withLength { pickleTree(tp1); pickleTree(tp2) }
+ case OrTypeTree(tp1, tp2) =>
+ writeByte(ORtpt)
+ withLength { pickleTree(tp1); pickleTree(tp2) }
+ case ByNameTypeTree(tp) =>
+ writeByte(BYNAMEtpt)
+ pickleTree(tp)
+ case Annotated(tree, annot) =>
+ writeByte(ANNOTATEDtpt)
+ withLength { pickleTree(tree); pickleTree(annot.tree) }
+ case PolyTypeTree(tparams, body) =>
+ writeByte(POLYtpt)
+ withLength { pickleParams(tparams); pickleTree(body) }
+ case TypeBoundsTree(lo, hi) =>
+ writeByte(TYPEBOUNDStpt)
+ withLength { pickleTree(lo); pickleTree(hi) }
+ }
+ catch {
+ case ex: AssertionError =>
+ println(i"error when pickling tree $tree")
+ throw ex
+ }
+ }
+
+ def pickleSelector(tag: Int, id: untpd.Ident)(implicit ctx: Context): Unit = {
+ registerTreeAddr(id)
+ writeByte(tag)
+ pickleName(id.name)
+ }
+
+ def qualifiedName(sym: Symbol)(implicit ctx: Context): TastyName =
+ if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName)
+ else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name))
+
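+  /** Pickle the modifiers of `sym` as a flat sequence of modifier tags followed by its
+   *  annotations; the unpickler reads them up to the enclosing definition's end address
+   *  (see `readModifiers`).
+   */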
+ def pickleModifiers(sym: Symbol)(implicit ctx: Context): Unit = {
+ import Flags._
+ val flags = sym.flags
+ val privateWithin = sym.privateWithin
+ if (privateWithin.exists) {
+ writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified)
+ pickleType(privateWithin.typeRef)
+ }
+ if (flags is Private) writeByte(PRIVATE)
+ if (flags is Protected) if (!privateWithin.exists) writeByte(PROTECTED)
+ if ((flags is Final) && !(sym is Module)) writeByte(FINAL)
+ if (flags is Case) writeByte(CASE)
+ if (flags is Override) writeByte(OVERRIDE)
+ if (flags is Inline) writeByte(INLINE)
+ if (flags is JavaStatic) writeByte(STATIC)
+ if (flags is Module) writeByte(OBJECT)
+ if (flags is Local) writeByte(LOCAL)
+ if (flags is Synthetic) writeByte(SYNTHETIC)
+ if (flags is Artifact) writeByte(ARTIFACT)
+ if (flags is Scala2x) writeByte(SCALA2X)
+ if (flags is InSuperCall) writeByte(INSUPERCALL)
+ if (sym.isTerm) {
+ if (flags is Implicit) writeByte(IMPLICIT)
+ if ((flags is Lazy) && !(sym is Module)) writeByte(LAZY)
+ if (flags is AbsOverride) { writeByte(ABSTRACT); writeByte(OVERRIDE) }
+ if (flags is Mutable) writeByte(MUTABLE)
+ if (flags is Accessor) writeByte(FIELDaccessor)
+ if (flags is CaseAccessor) writeByte(CASEaccessor)
+ if (flags is DefaultParameterized) writeByte(DEFAULTparameterized)
+ if (flags is Stable) writeByte(STABLE)
+ } else {
+ if (flags is Sealed) writeByte(SEALED)
+ if (flags is Abstract) writeByte(ABSTRACT)
+ if (flags is Trait) writeByte(TRAIT)
+ if (flags is Covariant) writeByte(COVARIANT)
+ if (flags is Contravariant) writeByte(CONTRAVARIANT)
+ }
+ sym.annotations.foreach(pickleAnnotation)
+ }
+
+ def pickleAnnotation(ann: Annotation)(implicit ctx: Context) =
+ if (ann.symbol != defn.BodyAnnot) { // inline bodies are reconstituted automatically when unpickling
+ writeByte(ANNOTATION)
+ withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
+ }
+
+ def pickle(trees: List[Tree])(implicit ctx: Context) = {
+ trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree))
+ assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, % when pickling ${ctx.source}")
+ }
+
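+  /** Compact the underlying tree buffer (shrinking over-allocated length fields) and
+   *  re-map the recorded symbol addresses to their adjusted positions.
+   */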
+ def compactify() = {
+ buf.compactify()
+
+ def updateMapWithDeltas[T](mp: collection.mutable.Map[T, Addr]) =
+ for (key <- mp.keysIterator.toBuffer[T]) mp(key) = adjusted(mp(key))
+
+ updateMapWithDeltas(symRefs)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
new file mode 100644
index 000000000..eba9ab533
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -0,0 +1,1161 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
+import StdNames._, Denotations._, Flags._, Constants._, Annotations._
+import util.Positions._
+import ast.{tpd, Trees, untpd}
+import Trees._
+import Decorators._
+import TastyUnpickler._, TastyBuffer._
+import scala.annotation.{tailrec, switch}
+import scala.collection.mutable.ListBuffer
+import scala.collection.{ mutable, immutable }
+import config.Printers.pickling
+
+/** Unpickler for typed trees
+ * @param reader the reader from which to unpickle
+ * @param tastyName the nametable
+ *  @param posUnpicklerOpt the unpickler for positions, if it exists
+ */
+class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table, posUnpicklerOpt: Option[PositionUnpickler]) {
+ import TastyFormat._
+ import TastyName._
+ import TreeUnpickler._
+ import tpd._
+
+ /** A map from addresses of definition entries to the symbols they define */
+ private val symAtAddr = new mutable.HashMap[Addr, Symbol]
+
+ /** A temporary map from addresses of definition entries to the trees they define.
+ * Used to remember trees of symbols that are created by a completion. Emptied
+ * once the tree is inlined into a larger tree.
+ */
+ private val treeAtAddr = new mutable.HashMap[Addr, Tree]
+
+ /** A map from addresses of type entries to the types they define.
+ * Currently only populated for types that might be recursively referenced
+ * from within themselves (i.e. RefinedTypes, PolyTypes, MethodTypes).
+ */
+ private val typeAtAddr = new mutable.HashMap[Addr, Type]
+
+  /** The root symbol denotations that are defined by the Tasty file associated with this
+ * TreeUnpickler. Set by `enterTopLevel`.
+ */
+ private var roots: Set[SymDenotation] = null
+
+ /** The root symbols that are defined in this Tasty file. This
+ * is a subset of `roots.map(_.symbol)`.
+ */
+ private var seenRoots: Set[Symbol] = Set()
+
+ /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */
+ private var ownerTree: OwnerTree = _
+
+ private def registerSym(addr: Addr, sym: Symbol) =
+ symAtAddr(addr) = sym
+
+ /** Enter all toplevel classes and objects into their scopes
+ * @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = {
+ this.roots = roots
+    val rdr = new TreeReader(reader).fork
+ ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr)
+ rdr.indexStats(reader.endAddr)
+ }
+
+ /** The unpickled trees */
+ def unpickle()(implicit ctx: Context): List[Tree] = {
+ assert(roots != null, "unpickle without previous enterTopLevel")
+ new TreeReader(reader).readTopLevel()(ctx.addMode(Mode.AllowDependentFunctions))
+ }
+
+ def toTermName(tname: TastyName): TermName = tname match {
+ case Simple(name) => name
+ case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
+ case Signed(original, params, result) => toTermName(original)
+ case Shadowed(original) => toTermName(original).shadowedName
+ case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
+ case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
+ case SuperAccessor(accessed) => toTermName(accessed).superName
+ case DefaultGetter(meth, num) => ???
+ }
+
+ def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref))
+ def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName
+
+ class Completer(owner: Symbol, reader: TastyReader) extends LazyType {
+ import reader._
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ treeAtAddr(currentAddr) =
+ new TreeReader(reader).readIndexedDef()(
+ ctx.withPhaseNoLater(ctx.picklerPhase).withOwner(owner))
+ }
+ }
+
+ class TreeReader(val reader: TastyReader) {
+ import reader._
+
+ def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
+ def fork = forkAt(currentAddr)
+
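+    /** Skip the tree with the given `tag`: jump to its end address if it has one,
+     *  otherwise skip its Nat argument and/or nested subtrees as dictated by the tag category.
+     */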
+ def skipTree(tag: Int): Unit =
+ if (tag >= firstLengthTreeTag) goto(readEnd())
+ else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
+ else if (tag >= firstASTTreeTag) skipTree()
+ else if (tag >= firstNatTreeTag) readNat()
+ def skipTree(): Unit = skipTree(readByte())
+
+ def skipParams(): Unit =
+ while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
+
+ /** Record all directly nested definitions and templates in current tree
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTree(buf: ListBuffer[OwnerTree], mode: MemberDefMode = AllDefs): Unit = {
+ val start = currentAddr
+ val tag = readByte()
+ tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | TEMPLATE =>
+ val end = readEnd()
+ for (i <- 0 until numRefs(tag)) readNat()
+ if (tag == TEMPLATE) scanTrees(buf, end, MemberDefsOnly)
+ if (mode != NoMemberDefs) buf += new OwnerTree(start, tag, fork, end)
+ goto(end)
+ case tag =>
+ if (mode == MemberDefsOnly) skipTree(tag)
+ else if (tag >= firstLengthTreeTag) {
+ val end = readEnd()
+          val nrefs = numRefs(tag)
+ if (nrefs < 0) {
+ for (i <- nrefs until 0) scanTree(buf)
+ goto(end)
+ }
+ else {
+ for (i <- 0 until nrefs) readNat()
+ scanTrees(buf, end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) { readNat(); scanTree(buf) }
+ else if (tag >= firstASTTreeTag) scanTree(buf)
+ else if (tag >= firstNatTreeTag) readNat()
+ }
+ }
+
+ /** Record all directly nested definitions and templates between current address and `end`
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTrees(buf: ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode = AllDefs): Unit = {
+ while (currentAddr.index < end.index) scanTree(buf, mode)
+ assert(currentAddr.index == end.index)
+ }
+
+ /** The next tag, following through SHARED tags */
+ def nextUnsharedTag: Int = {
+ val tag = nextByte
+ if (tag == SHARED) {
+ val lookAhead = fork
+ lookAhead.reader.readByte()
+ forkAt(lookAhead.reader.readAddr()).nextUnsharedTag
+ }
+ else tag
+ }
+
+ def readName(): TermName = toTermName(readNameRef())
+
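+    /** Read a name; if it is a `Signed` name, return the underlying name together with
+     *  its decoded signature, otherwise return the plain term name.
+     */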
+ def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
+ tastyName(readNameRef()) match {
+ case Signed(original, params, result) =>
+ var sig = Signature(params map toTypeName, toTypeName(result))
+ if (sig == Signature.NotAMethod) sig = Signature.NotAMethod
+ (toTermName(original), sig)
+ case name =>
+ toTermName(name)
+ }
+
+// ------ Reading types -----------------------------------------------------
+
+ /** Read names in an interleaved sequence of (parameter) names and types/bounds */
+ def readParamNames(end: Addr): List[Name] =
+ until(end) {
+ val name = readName()
+ skipTree()
+ name
+ }
+
+ /** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */
+ def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
+ until(end) { readNat(); readType().asInstanceOf[T] }
+
+    /** Read a reference to a definition and return the symbol created at that definition */
+ def readSymRef()(implicit ctx: Context): Symbol = symbolAt(readAddr())
+
+    /** The symbol at given address; create a new one if none exists yet */
+ def symbolAt(addr: Addr)(implicit ctx: Context): Symbol = symAtAddr.get(addr) match {
+ case Some(sym) =>
+ sym
+ case None =>
+ val sym = forkAt(addr).createSymbol()(ctx.withOwner(ownerTree.findOwner(addr)))
+ ctx.log(i"forward reference to $sym")
+ sym
+ }
+
+ /** The symbol defined by current definition */
+ def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match {
+ case Some(sym) =>
+ assert(ctx.owner == sym.owner, i"owner discrepancy for $sym, expected: ${ctx.owner}, found: ${sym.owner}")
+ sym
+ case None =>
+ createSymbol()
+ }
+
+ /** Read a type */
+ def readType()(implicit ctx: Context): Type = {
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading type ${astTagToString(tag)} at $start")
+
+ def registeringType[T](tp: Type, op: => T): T = {
+ typeAtAddr(start) = tp
+ op
+ }
+
+ def readLengthType(): Type = {
+ val end = readEnd()
+
+ def readNamesSkipParams: (List[Name], TreeReader) = {
+ val nameReader = fork
+ nameReader.skipTree() // skip result
+ val paramReader = nameReader.fork
+ (nameReader.readParamNames(end), paramReader)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPERtype =>
+ SuperType(readType(), readType())
+ case REFINEDtype =>
+ var name: Name = readName()
+ val parent = readType()
+ val ttag = nextUnsharedTag
+ if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
+ RefinedType(parent, name, readType())
+ // Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
+ // Eta expansion of the latter puts readType() out of the expression.
+ case APPLIEDtype =>
+ readType().appliedTo(until(end)(readType()))
+ case TYPEBOUNDS =>
+ TypeBounds(readType(), readType())
+ case TYPEALIAS =>
+ val alias = readType()
+ val variance =
+ if (nextByte == COVARIANT) { readByte(); 1 }
+ else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
+ else 0
+ TypeAlias(alias, variance)
+ case ANNOTATEDtype =>
+ AnnotatedType(readType(), Annotation(readTerm()))
+ case ANDtype =>
+ AndType(readType(), readType())
+ case ORtype =>
+ OrType(readType(), readType())
+ case BIND =>
+ val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
+ registerSym(start, sym)
+ TypeRef.withFixedSym(NoPrefix, sym.name, sym)
+ case POLYtype =>
+ val (rawNames, paramReader) = readNamesSkipParams
+ val (variances, paramNames) = rawNames
+ .map(name => (prefixToVariance(name.head), name.tail.toTypeName)).unzip
+ val result = PolyType(paramNames, variances)(
+ pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
+ pt => readType())
+ goto(end)
+ result
+ case METHODtype =>
+ val (names, paramReader) = readNamesSkipParams
+ val result = MethodType(names.map(_.toTermName), paramReader.readParamTypes[Type](end))(
+ mt => registeringType(mt, readType()))
+ goto(end)
+ result
+ case PARAMtype =>
+ readTypeRef() match {
+ case binder: PolyType => PolyParam(binder, readNat())
+ case binder: MethodType => MethodParam(binder, readNat())
+ }
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case ENUMconst =>
+ ConstantType(Constant(readTermRef().termSymbol))
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ def readSimpleType(): Type = (tag: @switch) match {
+ case TYPEREFdirect | TERMREFdirect =>
+ NamedType.withFixedSym(NoPrefix, readSymRef())
+ case TYPEREFsymbol | TERMREFsymbol =>
+ readSymNameRef()
+ case TYPEREFpkg =>
+ readPackageRef().moduleClass.typeRef
+ case TERMREFpkg =>
+ readPackageRef().termRef
+ case TYPEREF =>
+ val name = readName().toTypeName
+ TypeRef(readType(), name)
+ case TERMREF =>
+ readNameSplitSig() match {
+ case name: TermName => TermRef.all(readType(), name)
+ case (name: TermName, sig: Signature) => TermRef.withSig(readType(), name, sig)
+ }
+ case THIS =>
+ ThisType.raw(readType().asInstanceOf[TypeRef])
+ case RECtype =>
+ RecType(rt => registeringType(rt, readType()))
+ case RECthis =>
+ RecThis(readTypeRef().asInstanceOf[RecType])
+ case SHARED =>
+ val ref = readAddr()
+ typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
+ case UNITconst =>
+ ConstantType(Constant(()))
+ case TRUEconst =>
+ ConstantType(Constant(true))
+ case FALSEconst =>
+ ConstantType(Constant(false))
+ case BYTEconst =>
+ ConstantType(Constant(readInt().toByte))
+ case SHORTconst =>
+ ConstantType(Constant(readInt().toShort))
+ case CHARconst =>
+ ConstantType(Constant(readNat().toChar))
+ case INTconst =>
+ ConstantType(Constant(readInt()))
+ case LONGconst =>
+ ConstantType(Constant(readLongInt()))
+ case FLOATconst =>
+ ConstantType(Constant(java.lang.Float.intBitsToFloat(readInt())))
+ case DOUBLEconst =>
+ ConstantType(Constant(java.lang.Double.longBitsToDouble(readLongInt())))
+ case STRINGconst =>
+ ConstantType(Constant(readName().toString))
+ case NULLconst =>
+ ConstantType(Constant(null))
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case BYNAMEtype =>
+ ExprType(readType())
+ }
+
+ if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
+ }
+
+ private def readSymNameRef()(implicit ctx: Context): Type = {
+ val sym = readSymRef()
+ val prefix = readType()
+ val res = NamedType.withSymAndName(prefix, sym, sym.name)
+ prefix match {
+ case prefix: ThisType if prefix.cls eq sym.owner => res.withDenot(sym.denot)
+ // without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
+ // the problem arises when a self type of a trait is a type parameter of the same trait.
+ case _ => res
+ }
+ }
+
+ private def readPackageRef()(implicit ctx: Context): TermSymbol = {
+ val name = readName()
+ if (name == nme.ROOT || name == nme.ROOTPKG) defn.RootPackage
+ else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
+ else ctx.requiredPackage(name)
+ }
+
+ def readTypeRef(): Type =
+ typeAtAddr(readAddr())
+
+ def readTermRef()(implicit ctx: Context): TermRef =
+ readType().asInstanceOf[TermRef]
+
+// ------ Reading definitions -----------------------------------------------------
+
+ private def noRhs(end: Addr): Boolean =
+ currentAddr == end || isModifierTag(nextByte)
+
+ private def localContext(owner: Symbol)(implicit ctx: Context) = {
+ val lctx = ctx.fresh.setOwner(owner)
+ if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
+ }
+
+ private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbsType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
+ val lacksDefinition =
+ rhsIsEmpty &&
+ name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
+ isAbsType
+ var flags = givenFlags
+ if (lacksDefinition && tag != PARAM) flags |= Deferred
+ if (tag == DEFDEF) flags |= Method
+ if (givenFlags is Module)
+ flags = flags | (if (tag == VALDEF) ModuleCreationFlags else ModuleClassCreationFlags)
+ if (ctx.owner.isClass) {
+ if (tag == TYPEPARAM) flags |= Param
+ else if (tag == PARAM) flags |= ParamAccessor
+ }
+ else if (isParamTag(tag)) flags |= Param
+ flags
+ }
+
+ def isAbstractType(ttag: Int)(implicit ctx: Context): Boolean = nextUnsharedTag match {
+ case POLYtpt =>
+ val rdr = fork
+ rdr.reader.readByte() // tag
+ rdr.reader.readNat() // length
+ rdr.skipParams() // tparams
+ rdr.isAbstractType(rdr.nextUnsharedTag)
+ case TYPEBOUNDS | TYPEBOUNDStpt => true
+ case _ => false
+ }
+
+ /** Create symbol of definition node and enter in symAtAddr map
+ * @return the created symbol
+ */
+ def createSymbol()(implicit ctx: Context): Symbol = nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createMemberSymbol()
+ case TEMPLATE =>
+ val localDummy = ctx.newLocalDummy(ctx.owner)
+ registerSym(currentAddr, localDummy)
+ localDummy
+ case tag =>
+ throw new Error(s"illegal createSymbol at $currentAddr, tag = $tag")
+ }
+
+ /** Create symbol of member definition or parameter node and enter in symAtAddr map
+ * @return the created symbol
+ */
+ def createMemberSymbol()(implicit ctx: Context): Symbol = {
+ val start = currentAddr
+ val tag = readByte()
+ val end = readEnd()
+ val rawName = tastyName(readNameRef())
+ var name: Name = toTermName(rawName)
+ if (tag == TYPEDEF || tag == TYPEPARAM) name = name.toTypeName
+ skipParams()
+ val ttag = nextUnsharedTag
+ val isAbsType = isAbstractType(ttag)
+ val isClass = ttag == TEMPLATE
+ val templateStart = currentAddr
+ skipTree() // tpt
+ val rhsStart = currentAddr
+ val rhsIsEmpty = noRhs(end)
+ if (!rhsIsEmpty) skipTree()
+ val (givenFlags, annots, privateWithin) = readModifiers(end)
+ def nameFlags(tname: TastyName): FlagSet = tname match {
+ case TastyName.Expanded(_, original) => ExpandedName | nameFlags(tastyName(original))
+ case TastyName.SuperAccessor(_) => Flags.SuperAccessor
+ case _ => EmptyFlags
+ }
+ pickling.println(i"creating symbol $name at $start with flags $givenFlags")
+ val flags = normalizeFlags(tag, givenFlags | nameFlags(rawName), name, isAbsType, rhsIsEmpty)
+ def adjustIfModule(completer: LazyType) =
+ if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
+ val sym =
+ roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
+ case Some(rootd) =>
+ pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}")
+ rootd.info = adjustIfModule(
+ new Completer(ctx.owner, subReader(start, end)) with SymbolLoaders.SecondCompleter)
+ rootd.flags = flags &~ Touched // allow one more completion
+ rootd.privateWithin = privateWithin
+ seenRoots += rootd.symbol
+ rootd.symbol
+ case _ =>
+ val completer = adjustIfModule(new Completer(ctx.owner, subReader(start, end)))
+ if (isClass)
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord = start.index)
+ else
+ ctx.newSymbol(ctx.owner, name, flags, completer, privateWithin, coord = start.index)
+ } // TODO set position somehow (but take care not to upset Symbol#isDefinedInCurrentRun)
+ sym.annotations = annots
+ ctx.enter(sym)
+ registerSym(start, sym)
+ if (isClass) {
+ sym.completer.withDecls(newScope)
+ forkAt(templateStart).indexTemplateParams()(localContext(sym))
+ }
+ else if (sym.isInlineMethod)
+ sym.addAnnotation(LazyBodyAnnotation { ctx0 =>
+ implicit val ctx: Context = localContext(sym)(ctx0).addMode(Mode.ReadPositions)
+ // avoids space leaks by not capturing the current context
+ forkAt(rhsStart).readTerm()
+ })
+ goto(start)
+ sym
+ }
+
+ /** Read modifier list into triplet of flags, annotations and a privateWithin
+ * boundary symbol.
+ */
+ def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
+ var flags: FlagSet = EmptyFlags
+      val annots = new mutable.ListBuffer[Annotation]
+ var privateWithin: Symbol = NoSymbol
+ while (currentAddr.index != end.index) {
+ def addFlag(flag: FlagSet) = {
+ flags |= flag
+ readByte()
+ }
+ nextByte match {
+ case PRIVATE => addFlag(Private)
+ case INTERNAL => ??? // addFlag(Internal)
+ case PROTECTED => addFlag(Protected)
+ case ABSTRACT =>
+ readByte()
+ nextByte match {
+ case OVERRIDE => addFlag(AbsOverride)
+ case _ => flags |= Abstract
+ }
+ case FINAL => addFlag(Final)
+ case SEALED => addFlag(Sealed)
+ case CASE => addFlag(Case)
+ case IMPLICIT => addFlag(Implicit)
+ case LAZY => addFlag(Lazy)
+ case OVERRIDE => addFlag(Override)
+ case INLINE => addFlag(Inline)
+ case STATIC => addFlag(JavaStatic)
+ case OBJECT => addFlag(Module)
+ case TRAIT => addFlag(Trait)
+ case LOCAL => addFlag(Local)
+ case SYNTHETIC => addFlag(Synthetic)
+ case ARTIFACT => addFlag(Artifact)
+ case MUTABLE => addFlag(Mutable)
+ case LABEL => addFlag(Label)
+ case FIELDaccessor => addFlag(Accessor)
+ case CASEaccessor => addFlag(CaseAccessor)
+ case COVARIANT => addFlag(Covariant)
+ case CONTRAVARIANT => addFlag(Contravariant)
+ case SCALA2X => addFlag(Scala2x)
+ case DEFAULTparameterized => addFlag(DefaultParameterized)
+ case INSUPERCALL => addFlag(InSuperCall)
+ case STABLE => addFlag(Stable)
+ case PRIVATEqualified =>
+ readByte()
+ privateWithin = readType().typeSymbol
+ case PROTECTEDqualified =>
+ addFlag(Protected)
+ privateWithin = readType().typeSymbol
+ case ANNOTATION =>
+ readByte()
+ val end = readEnd()
+ val sym = readType().typeSymbol
+ val lazyAnnotTree = readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+ annots += Annotation.deferred(sym, _ => lazyAnnotTree.complete)
+ case _ =>
+ assert(false, s"illegal modifier tag at $currentAddr")
+ }
+ }
+ (flags, annots.toList, privateWithin)
+ }
+
+ /** Create symbols for the definitions in the statement sequence between
+ * current address and `end`.
+ * @return the largest subset of {NoInits, PureInterface} that a
+ * trait owning the indexed statements can have as flags.
+ */
+ def indexStats(end: Addr)(implicit ctx: Context): FlagSet = {
+ var initsFlags = NoInitsInterface
+ while (currentAddr.index < end.index) {
+ nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ val sym = symbolAtCurrent()
+ skipTree()
+ if (sym.isTerm && !sym.is(MethodOrLazyOrDeferred))
+ initsFlags = EmptyFlags
+ else if (sym.isClass ||
+ sym.is(Method, butNot = Deferred) && !sym.isConstructor)
+ initsFlags &= NoInits
+ case IMPORT =>
+ skipTree()
+ case PACKAGE =>
+ processPackage { (pid, end) => implicit ctx => indexStats(end) }
+ case _ =>
+ skipTree()
+ initsFlags = EmptyFlags
+ }
+ }
+ assert(currentAddr.index == end.index)
+ initsFlags
+ }
+
+ /** Process package with given operation `op`. The operation takes as arguments
+ * - a `RefTree` representing the `pid` of the package,
+ * - an end address,
+     *  - a context which has the processed package as owner
+ */
+ def processPackage[T](op: (RefTree, Addr) => Context => T)(implicit ctx: Context): T = {
+ readByte()
+ val end = readEnd()
+ val pid = ref(readTermRef()).asInstanceOf[RefTree]
+ op(pid, end)(localContext(pid.symbol.moduleClass))
+ }
+
+    /** Create symbols for the longest consecutive sequence of parameters with given
+ * `tag` starting at current address.
+ */
+ def indexParams(tag: Int)(implicit ctx: Context) =
+ while (nextByte == tag) {
+ symbolAtCurrent()
+ skipTree()
+ }
+
+ /** Create symbols for all type and value parameters of template starting
+ * at current address.
+ */
+ def indexTemplateParams()(implicit ctx: Context) = {
+ assert(readByte() == TEMPLATE)
+ readEnd()
+ indexParams(TYPEPARAM)
+ indexParams(PARAM)
+ }
+
+ /** If definition was already read by a completer, return the previously read tree
+ * or else read definition.
+ */
+ def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
+ case Some(tree) => skipTree(); tree
+ case none => readNewDef()
+ }
+
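+    /** Read a definition whose symbol was already created during indexing: complete the
+     *  symbol's info from the pickled type and right-hand side, and rebuild the
+     *  corresponding ValDef/DefDef/TypeDef tree.
+     */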
+ private def readNewDef()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ val sym = symAtAddr(start)
+ val tag = readByte()
+ val end = readEnd()
+
+ def readParamss(implicit ctx: Context): List[List[ValDef]] = {
+ collectWhile(nextByte == PARAMS) {
+ readByte()
+ readEnd()
+ readParams[ValDef](PARAM)
+ }
+ }
+
+ def readRhs(implicit ctx: Context) =
+ if (noRhs(end)) EmptyTree
+ else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+
+ def localCtx = localContext(sym)
+
+ def ValDef(tpt: Tree) =
+ ta.assignType(untpd.ValDef(sym.name.asTermName, tpt, readRhs(localCtx)), sym)
+
+ def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
+ ta.assignType(
+ untpd.DefDef(
+ sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
+ sym)
+
+ def TypeDef(rhs: Tree) =
+ ta.assignType(untpd.TypeDef(sym.name.asTypeName, rhs), sym)
+
+ def ta = ctx.typeAssigner
+
+ val name = readName()
+ pickling.println(s"reading def of $name at $start")
+ val tree: MemberDef = tag match {
+ case DEFDEF =>
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val vparamss = readParamss(localCtx)
+ val tpt = readTpt()
+ val typeParams = tparams.map(_.symbol)
+ val valueParamss = ctx.normalizeIfConstructor(
+ vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
+ val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe)
+ sym.info = ctx.methodType(typeParams, valueParamss, resType)
+ if (sym.isSetter && sym.accessedFieldOrGetter.is(ParamAccessor)) {
+ // reconstitute ParamAccessor flag of setters for var parameters, which is not pickled
+ sym.setFlag(ParamAccessor)
+ sym.resetFlag(Deferred)
+ }
+ DefDef(tparams, vparamss, tpt)
+ case VALDEF =>
+ val tpt = readTpt()
+ sym.info = tpt.tpe
+ ValDef(tpt)
+ case TYPEDEF | TYPEPARAM =>
+ if (sym.isClass) {
+ val companion = sym.scalacLinkedClass
+
+ // Is the companion defined in the same Tasty file as `sym`?
+ // The only case to check here is if `sym` is a root. In this case
+ // `companion` might have been entered by the environment but it might
+ // be missing from the Tasty file. So we check explicitly for that.
+ def isCodefined =
+ roots.contains(companion.denot) == seenRoots.contains(companion)
+ if (companion.exists && isCodefined) {
+ import transform.SymUtils._
+ if (sym is Flags.ModuleClass) sym.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, companion)
+ else sym.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, companion)
+ }
+ TypeDef(readTemplate(localCtx))
+ } else {
+ val rhs = readTpt()
+ sym.info = rhs.tpe match {
+ case _: TypeBounds | _: ClassInfo => rhs.tpe
+ case _ => TypeAlias(rhs.tpe, sym.variance)
+ }
+ TypeDef(rhs)
+ }
+ case PARAM =>
+ val tpt = readTpt()
+ if (noRhs(end)) {
+ sym.info = tpt.tpe
+ ValDef(tpt)
+ }
+ else {
+ sym.setFlag(Method)
+ sym.info = ExprType(tpt.tpe)
+ pickling.println(i"reading param alias $name -> $currentAddr")
+ DefDef(Nil, Nil, tpt)
+ }
+ }
+ val mods =
+ if (sym.annotations.isEmpty) untpd.EmptyModifiers
+ else untpd.Modifiers(annotations = sym.annotations.map(_.tree))
+ tree.withMods(mods)
+ // record annotations in tree so that tree positions can be filled in.
+        // Note: Once the inline PR with its changes to positions is in, this should
+        // no longer be necessary.
+ goto(end)
+ setPos(start, tree)
+ }
+
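+    /** Read a class template. A provisional ClassInfo is installed first so that parents
+     *  and the self type can refer to the class while they are being read; it is then
+     *  replaced with the actual parents and self type before the statements are read lazily.
+     */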
+ private def readTemplate(implicit ctx: Context): Template = {
+ val start = currentAddr
+ val cls = ctx.owner.asClass
+ def setClsInfo(parents: List[TypeRef], selfType: Type) =
+ cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
+ val assumedSelfType =
+ if (cls.is(Module) && cls.owner.isClass)
+ TermRef.withSig(cls.owner.thisType, cls.name.sourceModuleName, Signature.NotAMethod)
+ else NoType
+ setClsInfo(Nil, assumedSelfType)
+ val localDummy = symbolAtCurrent()
+ assert(readByte() == TEMPLATE)
+ val end = readEnd()
+ val tparams = readIndexedParams[TypeDef](TYPEPARAM)
+ val vparams = readIndexedParams[ValDef](PARAM)
+ val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) {
+ nextByte match {
+ case APPLY | TYPEAPPLY => readTerm()
+ case _ => readTpt()
+ }
+ }
+ val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
+ val self =
+ if (nextByte == SELFDEF) {
+ readByte()
+ untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
+ }
+ else EmptyValDef
+ setClsInfo(parentRefs, if (self.isEmpty) NoType else self.tpt.tpe)
+ cls.setApplicableFlags(fork.indexStats(end))
+ val constr = readIndexedDef().asInstanceOf[DefDef]
+
+ def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
+ (tparams, stats) match {
+ case (tparam :: tparams1, (alias: TypeDef) :: stats1)
+ if tparam.name == alias.name.expandedName(cls) =>
+ val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
+ (tparam :: alias :: tas, stats2)
+ case _ =>
+ (tparams, stats)
+ }
+
+ val lazyStats = readLater(end, rdr => implicit ctx => {
+ val stats0 = rdr.readIndexedStats(localDummy, end)
+ val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
+ tparamsAndAliases ++ vparams ++ stats
+ })
+ setPos(start,
+ untpd.Template(constr, parents, self, lazyStats)
+ .withType(localDummy.nonMemberTermRef))
+ }
+
+    def skipToplevel()(implicit ctx: Context): Unit = {
+ if (!isAtEnd)
+ nextByte match {
+ case IMPORT | PACKAGE =>
+ skipTree()
+ skipToplevel()
+ case _ =>
+ }
+ }
+
+ def readTopLevel()(implicit ctx: Context): List[Tree] = {
+ @tailrec def read(acc: ListBuffer[Tree]): List[Tree] = nextByte match {
+ case IMPORT | PACKAGE =>
+ acc += readIndexedStat(NoSymbol)
+ if (!isAtEnd) read(acc) else acc.toList
+        case _ => // top-level statements that are not imports or packages are not part of the result
+ acc.toList
+ }
+ read(new ListBuffer[tpd.Tree])
+ }
+
+ def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
+ case TYPEDEF | VALDEF | DEFDEF =>
+ readIndexedDef()
+ case IMPORT =>
+ readImport()
+ case PACKAGE =>
+ val start = currentAddr
+ processPackage { (pid, end) => implicit ctx =>
+ setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
+ }
+ case _ =>
+ readTerm()(ctx.withOwner(exprOwner))
+ }
+
+ def readImport()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ readByte()
+ readEnd()
+ val expr = readTerm()
+ def readSelectors(): List[untpd.Tree] = nextByte match {
+ case IMPORTED =>
+ val start = currentAddr
+ readByte()
+ val from = setPos(start, untpd.Ident(readName()))
+ nextByte match {
+ case RENAMED =>
+ val start2 = currentAddr
+ readByte()
+ val to = setPos(start2, untpd.Ident(readName()))
+ untpd.Thicket(from, to) :: readSelectors()
+ case _ =>
+ from :: readSelectors()
+ }
+ case _ =>
+ Nil
+ }
+ setPos(start, Import(expr, readSelectors()))
+ }
+
+ def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
+ until(end)(readIndexedStat(exprOwner))
+
+ def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
+ fork.indexStats(end)
+ readIndexedStats(exprOwner, end)
+ }
+
+ def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
+ collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
+
+ def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = {
+ fork.indexParams(tag)
+ readIndexedParams(tag)
+ }
+
+// ------ Reading trees -----------------------------------------------------
+
+ def readTerm()(implicit ctx: Context): Tree = { // TODO: rename to readTree
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading term ${astTagToString(tag)} at $start")
+
+ def readPathTerm(): Tree = {
+ goto(start)
+ readType() match {
+ case path: TypeRef => TypeTree(path)
+ case path: TermRef => ref(path)
+ case path: ThisType => This(path.cls)
+ case path: ConstantType => Literal(path.value)
+ }
+ }
+
+ def completeSelect(name: Name, tpf: Type => Type): Select = {
+ val localCtx =
+ if (name == nme.CONSTRUCTOR) ctx.addMode(Mode.InSuperCall) else ctx
+ val qual = readTerm()(localCtx)
+ val unshadowed = if (name.isShadowedName) name.revertShadowed else name
+ untpd.Select(qual, unshadowed).withType(tpf(qual.tpe.widenIfUnstable))
+ }
+
+ def readQualId(): (untpd.Ident, TypeRef) = {
+ val qual = readTerm().asInstanceOf[untpd.Ident]
+ (untpd.Ident(qual.name).withPos(qual.pos), qual.tpe.asInstanceOf[TypeRef])
+ }
+
+ def readSimpleTerm(): Tree = tag match {
+ case SHARED =>
+ forkAt(readAddr()).readTerm()
+ case IDENT =>
+ untpd.Ident(readName()).withType(readType())
+ case IDENTtpt =>
+ untpd.Ident(readName().toTypeName).withType(readType())
+ case SELECT =>
+ def readRest(name: Name, sig: Signature) =
+ completeSelect(name, TermRef.withSig(_, name.asTermName, sig))
+ readNameSplitSig match {
+ case name: Name => readRest(name, Signature.NotAMethod)
+ case (name: Name, sig: Signature) => readRest(name, sig)
+ }
+ case SELECTtpt =>
+ val name = readName().toTypeName
+ completeSelect(name, TypeRef(_, name))
+ case QUALTHIS =>
+ val (qual, tref) = readQualId()
+ untpd.This(qual).withType(ThisType.raw(tref))
+ case NEW =>
+ New(readTpt())
+ case SINGLETONtpt =>
+ SingletonTypeTree(readTerm())
+ case BYNAMEtpt =>
+ ByNameTypeTree(readTpt())
+ case _ =>
+ readPathTerm()
+ }
+
+ def readLengthTerm(): Tree = {
+ val end = readEnd()
+
+ def localNonClassCtx = {
+ val ctx1 = ctx.fresh.setNewScope
+ if (ctx.owner.isClass) ctx1.setOwner(ctx1.newLocalDummy(ctx.owner)) else ctx1
+ }
+
+ def readBlock(mkTree: (List[Tree], Tree) => Tree): Tree = {
+ val exprReader = fork
+ skipTree()
+ val localCtx = localNonClassCtx
+ val stats = readStats(ctx.owner, end)(localCtx)
+ val expr = exprReader.readTerm()(localCtx)
+ mkTree(stats, expr)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPER =>
+ val qual = readTerm()
+ val (mixId, mixTpe) = ifBefore(end)(readQualId(), (untpd.EmptyTypeIdent, NoType))
+ tpd.Super(qual, mixId, ctx.mode.is(Mode.InSuperCall), mixTpe.typeSymbol)
+ case APPLY =>
+ val fn = readTerm()
+ val isJava = fn.symbol.is(JavaDefined)
+ def readArg() = readTerm() match {
+ case SeqLiteral(elems, elemtpt) if isJava =>
+ JavaSeqLiteral(elems, elemtpt)
+ case arg => arg
+ }
+ tpd.Apply(fn, until(end)(readArg()))
+ case TYPEAPPLY =>
+ tpd.TypeApply(readTerm(), until(end)(readTpt()))
+ case TYPED =>
+ val expr = readTerm()
+ val tpt = readTpt()
+ val expr1 = expr match {
+ case SeqLiteral(elems, elemtpt) if tpt.tpe.isRef(defn.ArrayClass) =>
+ JavaSeqLiteral(elems, elemtpt)
+ case expr => expr
+ }
+ Typed(expr1, tpt)
+ case NAMEDARG =>
+ NamedArg(readName(), readTerm())
+ case ASSIGN =>
+ Assign(readTerm(), readTerm())
+ case BLOCK =>
+ readBlock(Block)
+ case INLINED =>
+ val call = readTerm()
+ readBlock((defs, expr) => Inlined(call, defs.asInstanceOf[List[MemberDef]], expr))
+ case IF =>
+ If(readTerm(), readTerm(), readTerm())
+ case LAMBDA =>
+ val meth = readTerm()
+ val tpt = ifBefore(end)(readTpt(), EmptyTree)
+ Closure(Nil, meth, tpt)
+ case MATCH =>
+ Match(readTerm(), readCases(end))
+ case RETURN =>
+ val from = readSymRef()
+ val expr = ifBefore(end)(readTerm(), EmptyTree)
+ Return(expr, Ident(from.termRef))
+ case TRY =>
+ Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))
+ case REPEATED =>
+ val elemtpt = readTpt()
+ SeqLiteral(until(end)(readTerm()), elemtpt)
+ case BIND =>
+ val name = readName()
+ val info = readType()
+ val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, info)
+ registerSym(start, sym)
+ Bind(sym, readTerm())
+ case ALTERNATIVE =>
+ Alternative(until(end)(readTerm()))
+ case UNAPPLY =>
+ val fn = readTerm()
+ val implicitArgs =
+ collectWhile(nextByte == IMPLICITarg) {
+ readByte()
+ readTerm()
+ }
+ val patType = readType()
+ val argPats = until(end)(readTerm())
+ UnApply(fn, implicitArgs, argPats, patType)
+ case REFINEDtpt =>
+ val refineCls = ctx.newCompleteClassSymbol(
+ ctx.owner, tpnme.REFINE_CLASS, Fresh, parents = Nil)
+ typeAtAddr(start) = refineCls.typeRef
+ val parent = readTpt()
+ val refinements = readStats(refineCls, end)(localContext(refineCls))
+ RefinedTypeTree(parent, refinements, refineCls)
+ case APPLIEDtpt =>
+ AppliedTypeTree(readTpt(), until(end)(readTpt()))
+ case ANDtpt =>
+ AndTypeTree(readTpt(), readTpt())
+ case ORtpt =>
+ OrTypeTree(readTpt(), readTpt())
+ case ANNOTATEDtpt =>
+ Annotated(readTpt(), readTerm())
+ case POLYtpt =>
+ val localCtx = localNonClassCtx
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val body = readTpt()(localCtx)
+ PolyTypeTree(tparams, body)
+ case TYPEBOUNDStpt =>
+ TypeBoundsTree(readTpt(), readTpt())
+ case _ =>
+ readPathTerm()
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
+ tree.overwriteType(tree.tpe.simplified)
+ setPos(start, tree)
+ }
+
+ def readTpt()(implicit ctx: Context) =
+ if (isTypeTreeTag(nextUnsharedTag)) readTerm()
+ else {
+ val start = currentAddr
+ val tp = readType()
+ if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree
+ }
+
+ def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
+ collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) }
+
+ def readCase()(implicit ctx: Context): CaseDef = {
+ val start = currentAddr
+ readByte()
+ val end = readEnd()
+ val pat = readTerm()
+ val rhs = readTerm()
+ val guard = ifBefore(end)(readTerm(), EmptyTree)
+ setPos(start, CaseDef(pat, guard, rhs))
+ }
+
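+    /** Defer reading the section up to `end`: remember a forked reader at the current
+     *  address, skip ahead, and only read the contents when the resulting lazy value
+     *  is completed.
+     */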
+ def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
+ val localReader = fork
+ goto(end)
+ new LazyReader(localReader, op)
+ }
+
+// ------ Setting positions ------------------------------------------------
+
+ /** Set position of `tree` at given `addr`. */
+ def setPos[T <: untpd.Tree](addr: Addr, tree: T)(implicit ctx: Context): tree.type =
+ if (ctx.mode.is(Mode.ReadPositions)) {
+ posUnpicklerOpt match {
+ case Some(posUnpickler) =>
+ //println(i"setPos $tree / ${tree.getClass} at $addr to ${posUnpickler.posAt(addr)}")
+ val pos = posUnpickler.posAt(addr)
+ if (pos.exists) tree.setPosUnchecked(pos)
+ tree
+ case _ =>
+ //println(i"no pos $tree")
+ tree
+ }
+ }
+ else tree
+ }
+
+ class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] {
+ def complete(implicit ctx: Context): T = {
+ pickling.println(i"starting to read at ${reader.reader.currentAddr}")
+ op(reader)(ctx.addMode(Mode.AllowDependentFunctions).withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader) extends LazyAnnotation(sym) {
+ def complete(implicit ctx: Context) = {
+ reader.readTerm()(ctx.withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+  /** A lazy data structure that records how definitions are nested in TASTY data.
+ * The structure is lazy because it needs to be computed only for forward references
+ * to symbols that happen before the referenced symbol is created (see `symbolAt`).
+ * Such forward references are rare.
+ *
+ * @param addr The address of tree representing an owning definition, NoAddr for root tree
+ * @param tag The tag at `addr`. Used to determine which subtrees to scan for children
+   *              (i.e. if `tag` is template, don't scan member defs, as these already
+   *              belong to the enclosing class).
+ * @param reader The reader to be used for scanning for children
+ * @param end The end of the owning definition
+ */
+ class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr) {
+
+ /** All definitions that have the definition at `addr` as closest enclosing definition */
+ lazy val children: List[OwnerTree] = {
+ val buf = new ListBuffer[OwnerTree]
+ reader.scanTrees(buf, end, if (tag == TEMPLATE) NoMemberDefs else AllDefs)
+ buf.toList
+ }
+
+ /** Find the owner of definition at `addr` */
+ def findOwner(addr: Addr)(implicit ctx: Context): Symbol = {
+ def search(cs: List[OwnerTree], current: Symbol): Symbol =
+ try cs match {
+ case ot :: cs1 =>
+ if (ot.addr.index == addr.index)
+ current
+ else if (ot.addr.index < addr.index && addr.index < ot.end.index)
+ search(ot.children, reader.symbolAt(ot.addr))
+ else
+ search(cs1, current)
+ case Nil =>
+ throw new TreeWithoutOwner
+ }
+ catch {
+ case ex: TreeWithoutOwner =>
+ println(i"no owner for $addr among $cs") // DEBUG
+ throw ex
+ }
+ search(children, NoSymbol)
+ }
+
+    override def toString = s"OwnerTree(${addr.index}, ${end.index})"
+ }
+}
+
+object TreeUnpickler {
+
+ /** An enumeration indicating which subtrees should be added to an OwnerTree. */
+ type MemberDefMode = Int
+ final val MemberDefsOnly = 0 // add only member defs; skip other statements
+ final val NoMemberDefs = 1 // add only statements that are not member defs
+ final val AllDefs = 2 // add everything
+
+ class TreeWithoutOwner extends Exception
+}
+
+