aboutsummaryrefslogtreecommitdiff
path: root/src/dotty/tools/dotc/core/tasty
diff options
context:
space:
mode:
authorMartin Odersky <odersky@gmail.com>2015-04-29 10:36:16 +0200
committerMartin Odersky <odersky@gmail.com>2015-05-02 19:23:08 +0200
commit60ab9f8f525d319aa5b6d5052018c6781da036eb (patch)
treefbc5096a1f7193a4970226a7ad6f03fbf7670a4b /src/dotty/tools/dotc/core/tasty
parentac46a0e4489bba4f76863cc1491bf2b8441ed1cb (diff)
downloaddotty-60ab9f8f525d319aa5b6d5052018c6781da036eb.tar.gz
dotty-60ab9f8f525d319aa5b6d5052018c6781da036eb.tar.bz2
dotty-60ab9f8f525d319aa5b6d5052018c6781da036eb.zip
Pickling modularization reorg
The pickling package got rather large and confusing with three separate tasks that each had their own conventions: read JVM classfiles, read Scala2 pickle info, read Tasty. The classes for each task are now in separate packages.
Diffstat (limited to 'src/dotty/tools/dotc/core/tasty')
-rw-r--r--src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala60
-rw-r--r--src/dotty/tools/dotc/core/tasty/NameBuffer.scala93
-rw-r--r--src/dotty/tools/dotc/core/tasty/PositionPickler.scala75
-rw-r--r--src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala38
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyBuffer.scala188
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyFormat.scala491
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyName.scala30
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyPickler.scala70
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyPrinter.scala122
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyReader.scala141
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala95
-rw-r--r--src/dotty/tools/dotc/core/tasty/TreeBuffer.scala179
-rw-r--r--src/dotty/tools/dotc/core/tasty/TreePickler.scala551
-rw-r--r--src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala889
14 files changed, 3022 insertions, 0 deletions
diff --git a/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
new file mode 100644
index 000000000..ccd3f78e8
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -0,0 +1,60 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, SymDenotations._
+import dotty.tools.dotc.ast.tpd
+import TastyUnpickler._, TastyBuffer._
+import util.Positions._
+import util.{SourceFile, NoSource}
+import PositionUnpickler._
+import classfile.ClassfileParser
+
+object DottyUnpickler {
+
+ /** Exception thrown if the TASTY file being unpickled is corrupted */
+ class BadSignature(msg: String) extends RuntimeException(msg)
+}
+
+/** A class for unpickling Tasty trees and symbols.
+ * @param bytes the bytearray containing the Tasty file from which we unpickle
+ */
+class DottyUnpickler(bytes: Array[Byte]) extends ClassfileParser.Embedded {
+ import tpd._
+
+ private val unpickler = new TastyUnpickler(bytes)
+ private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler).get // .get: the "ASTs" section is mandatory in a TASTY file
+
+ /** Enter all toplevel classes and objects into their scopes
+ * @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
+ treeUnpickler.enterTopLevel(roots)
+
+ /** The unpickled trees, and the source file they come from
+ * @param readPositions if true, trees get decorated with position information.
+ */
+ def body(readPositions: Boolean = false)(implicit ctx: Context): (List[Tree], SourceFile) = {
+ val source = unpickler.unpickle(new SourceFileUnpickler).getOrElse(NoSource) // a missing "Sourcefile" section maps to NoSource
+ if (readPositions)
+ for ((totalRange, positions) <- unpickler.unpickle(new PositionsSectionUnpickler)) // "Positions" section is optional
+ treeUnpickler.usePositions(totalRange, positions)
+ (treeUnpickler.unpickle(), source)
+ }
+
+ private class SourceFileUnpickler extends SectionUnpickler[SourceFile]("Sourcefile") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new SourceFile(tastyName(reader.readNameRef()).toString, Seq())
+ }
+
+ private class TreeSectionUnpickler extends SectionUnpickler[TreeUnpickler]("ASTs") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new TreeUnpickler(reader, tastyName)
+ }
+
+ private class PositionsSectionUnpickler extends SectionUnpickler[(Position, AddrToPosition)]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new PositionUnpickler(reader).unpickle()
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
new file mode 100644
index 000000000..69fd63805
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -0,0 +1,93 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import collection.mutable
+import Names.{Name, chrs}
+import Decorators._, NameOps._
+import TastyBuffer._
+import scala.io.Codec
+import TastyName._
+import TastyFormat._
+
+class NameBuffer extends TastyBuffer(10000) { // 10000 = initial size; TastyBuffer doubles on demand
+
+ private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef] // LinkedHashMap: iteration order == ref assignment order
+
+ def nameIndex(name: TastyName): NameRef = nameRefs.get(name) match { // memoized: each distinct name gets exactly one ref
+ case Some(ref) =>
+ ref
+ case None =>
+ val ref = NameRef(nameRefs.size) // refs are assigned densely, starting from 0
+ nameRefs(name) = ref
+ ref
+ }
+ def nameIndex(name: Name): NameRef = {
+ val tname =
+ if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed))
+ else Simple(name.toTermName)
+ nameIndex(tname)
+ }
+
+ def nameIndex(str: String): NameRef = nameIndex(str.toTermName)
+
+ def fullNameIndex(name: Name): NameRef = { // splits a fully qualified name on '.' into nested Qualified entries
+ val pos = name.lastIndexOf('.')
+ if (pos > 0)
+ nameIndex(Qualified(fullNameIndex(name.take(pos)), nameIndex(name.drop(pos + 1))))
+ else
+ nameIndex(name) // no '.' (or only a leading one): plain name
+ }
+
+ private def withLength(op: => Unit): Unit = { // writes a one-byte length prefix, patched in after op has run
+ val lengthAddr = currentAddr
+ writeByte(0) // placeholder, overwritten by putNat below
+ op
+ val length = currentAddr.index - lengthAddr.index - 1
+ assert(length < 128) // one-byte length: a name entry's payload is limited to 127 bytes
+ putNat(lengthAddr, length, 1)
+ }
+
+ def writeNameRef(ref: NameRef) = writeNat(ref.index)
+
+ def pickleName(name: TastyName): Unit = name match {
+ case Simple(name) =>
+ val bytes =
+ if (name.length == 0) new Array[Byte](0)
+ else Codec.toUTF8(chrs, name.start, name.length) // encode straight from the shared name-chars array
+ writeByte(UTF8)
+ writeNat(bytes.length)
+ writeBytes(bytes, bytes.length)
+ case Qualified(qualified, selector) =>
+ writeByte(QUALIFIED)
+ withLength { writeNameRef(qualified); writeNameRef(selector) }
+ case Signed(original, params, result) =>
+ writeByte(SIGNED)
+ withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) } // note: result is written before params, per the SIGNED grammar
+ case Expanded(prefix, original) =>
+ writeByte(EXPANDED)
+ withLength { writeNameRef(prefix); writeNameRef(original) }
+ case ModuleClass(module) =>
+ writeByte(OBJECTCLASS)
+ withLength { writeNameRef(module) }
+ case SuperAccessor(accessed) =>
+ writeByte(SUPERACCESSOR)
+ withLength { writeNameRef(accessed) }
+ case DefaultGetter(method, paramNumber) =>
+ writeByte(DEFAULTGETTER)
+ withLength { writeNameRef(method); writeNat(paramNumber) }
+ case Shadowed(original) =>
+ writeByte(SHADOWED)
+ withLength { writeNameRef(original) }
+ }
+
+ override def assemble(): Unit = {
+ var i = 0
+ for ((name, ref) <- nameRefs) {
+ assert(ref.index == i) // invariant: refs were assigned densely in insertion order
+ i += 1
+ pickleName(name)
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
new file mode 100644
index 000000000..b0550b70a
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -0,0 +1,75 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast.tpd._
+import ast.Trees.WithLazyField
+import TastyFormat._
+import core._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._
+import collection.mutable
+import TastyBuffer._
+import util.Positions._
+
+object PositionPickler {
+
+ trait DeferredPosition {
+ var parentPos: Position = NoPosition
+ }
+
+ def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit = // apply op to every tree reachable from x, threading the parent position
+ if (parentPos.exists)
+ x match {
+ case x: Tree @unchecked =>
+ op(x, parentPos)
+ x match {
+ case x: MemberDef @unchecked => traverse(x.symbol.annotations, x.pos, op) // annotations hang off the symbol, not the tree
+ case _ =>
+ }
+ traverse(x.productIterator, x.pos, op)
+ case x: DeferredPosition =>
+ x.parentPos = parentPos // remember the parent position for the deferred node
+ case xs: TraversableOnce[_] =>
+ xs.foreach(traverse(_, parentPos, op))
+ case _ =>
+ }
+}
+import PositionPickler._
+
+class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) {
+ val buf = new TastyBuffer(5000)
+ pickler.newSection("Positions", buf)
+ import buf._
+
+ def picklePositions(roots: List[Tree], totalRange: Position)(implicit ctx: Context) = {
+ var lastIndex = 0 // address of the last recorded tree; addresses are delta-encoded
+ def record(tree: Tree, parentPos: Position): Unit =
+ if (tree.pos.exists) {
+ def msg = s"failure to pickle $tree at ${tree.pos}, parent = $parentPos"
+ val endPos = tree.pos.end min parentPos.end
+ // end positions can be larger than their parents
+ // e.g. in the case of synthetic empty ranges, which are placed at the next token after
+ // the current construct.
+ val endDelta = endPos - parentPos.end
+ val startPos =
+ if (endDelta == 0) tree.pos.start max parentPos.start else tree.pos.start min endPos
+ // Since end positions are corrected above, start positions have to follow suit.
+ val startDelta = startPos - parentPos.start
+ if (startDelta != 0 || endDelta != 0) // trees with the same position as their parent are omitted
+ for (addr <- addrOfTree(tree)) {
+ buf.writeInt(addr.index - lastIndex)
+ lastIndex = addr.index
+ if (startDelta != 0) buf.writeInt(startDelta)
+ if (endDelta != 0) {
+ assert(endDelta < 0, msg) // endPos is a min, so a nonzero delta must be negative
+ buf.writeInt(endDelta)
+ } else
+ assert(startDelta >= 0, msg)
+ }
+ }
+
+ buf.writeNat(totalRange.end) // section header: total source length
+ traverse(roots, totalRange, record)
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
new file mode 100644
index 000000000..fa80a2769
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
@@ -0,0 +1,38 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+
+import util.Positions._
+import collection.mutable
+import TastyBuffer.Addr
+
+object PositionUnpickler {
+ type AddrToPosition = mutable.HashMap[Addr, Position]
+}
+
+/** Unpickler for tree positions */
+class PositionUnpickler(reader: TastyReader) {
+ import PositionUnpickler._
+ import reader._
+
+ def unpickle(): (Position, AddrToPosition) = { // returns (total source range, tree-address -> position-delta map)
+ val positions = new mutable.HashMap[Addr, Position] // Dotty deviation: Can't use new AddrToPosition here. TODO: fix this!
+ val sourceLength = readNat()
+ def readDelta() = if (isAtEnd) 0 else readInt() // end of section reads as delta 0
+ var curIndex: Addr = Addr(readDelta())
+ while (!isAtEnd) {
+ val delta1 = readDelta()
+ val delta2 = readDelta()
+ val (startDelta, endDelta, indexDelta) =
+ if (delta2 <= 0) (delta1, -delta2, readDelta()) // both start and end deltas were pickled
+ else if (delta1 < 0) (0, -delta1, delta2) // only a (negative) end delta was pickled
+ else (delta1, 0, delta2) // only a start delta was pickled
+ positions(curIndex) = Position(startDelta, endDelta, startDelta)
+ // make non-synthetic position; will be made synthetic by normalization.
+ curIndex += indexDelta
+ }
+ (Position(0, sourceLength), positions)
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala b/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
new file mode 100644
index 000000000..07442f63b
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.dble
+
+object TastyBuffer {
+
+ /** The number of digits of the natural number `nat`, written in base 128 format. */
+ def natSize(nat: Int): Int =
+ if (nat < 128) 1 else natSize(nat >>> 7) + 1
+
+ /** An address pointing to an index in a Tasty buffer's byte array */
+ case class Addr(val index: Int) extends AnyVal {
+ def - (delta: Int): Addr = Addr(this.index - delta)
+ def + (delta: Int): Addr = Addr(this.index + delta)
+
+ def relativeTo(base: Addr): Addr = this - base.index - AddrWidth
+ }
+
+ val NoAddr = Addr(-1)
+
+ /** The maximal number of address bytes.
+ * Since addresses are written as base-128 natural numbers,
+ * the value of 4 gives a maximal array size of 256M.
+ */
+ final val AddrWidth = 4
+}
+import TastyBuffer._
+
+/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format,
+ * and that supports reading and patching addresses represented as natural numbers.
+ */
+class TastyBuffer(initialSize: Int) {
+
+ /** The current byte array, will be expanded as needed */
+ var bytes = new Array[Byte](initialSize)
+
+ /** The number of bytes written */
+ var length = 0
+
+ // -- Output routines --------------------------------------------
+
+ /** Write a byte of data. */
+ def writeByte(b: Int): Unit = {
+ if (length >= bytes.length)
+ bytes = dble(bytes)
+ bytes(length) = b.toByte
+ length += 1
+ }
+
+ /** Write the first `n` bytes of `data`. */
+ def writeBytes(data: Array[Byte], n: Int): Unit = {
+ while (bytes.length < length + n) bytes = dble(bytes)
+ Array.copy(data, 0, bytes, length, n)
+ length += n
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeNat(x: Int): Unit =
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL) // zero-extend: negative Ints pickle as 32-bit naturals
+
+ /** Write a natural number in 2's complement big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeInt(x: Int): Unit =
+ writeLongInt(x)
+
+ /**
+ * Like writeNat, but for longs. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range Int.MIN_VALUE to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long): Unit = {
+ def writePrefix(x: Long): Unit = {
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt) // final digit carries the stop bit 0x80
+ }
+
+ /** Like writeInt, but for longs */
+ def writeLongInt(x: Long): Unit = {
+ def writePrefix(x: Long): Unit = {
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y) // emit more digits unless the rest is pure sign-extension of bit 6
+ writeByte(((x & 0x7f) | 0x80).toInt) // final digit carries the stop bit 0x80
+ }
+
+ /** Write an uncompressed Long stored in 8 bytes in big endian format */
+ def writeUncompressedLong(x: Long): Unit = {
+ var y = x
+ val bytes = new Array[Byte](8)
+ for (i <- 7 to 0 by -1) {
+ bytes(i) = (y & 0xff).toByte
+ y = y >>> 8
+ }
+ writeBytes(bytes, 8)
+ }
+
+ // -- Address handling --------------------------------------------
+
+ /** Write natural number `x` right-adjusted in a field of `width` bytes
+ * starting with address `at`.
+ */
+ def putNat(at: Addr, x: Int, width: Int): Unit = {
+ var y = x
+ var w = width
+ if(at.index + w >= bytes.length) // NOTE(review): assumes a single doubling suffices; holds because w <= AddrWidth
+ bytes = dble(bytes)
+ var digit = y & 0x7f | 0x80 // lowest digit gets the stop bit
+ while (w > 0) {
+ w -= 1
+ bytes(at.index + w) = digit.toByte
+ y >>>= 7
+ digit = y & 0x7f
+ }
+ assert(y == 0, s"number $x too large to fit in $width bytes")
+ }
+
+ /** The byte at given address */
+ def getByte(at: Addr): Int = bytes(at.index)
+
+ /** The natural number at address `at` */
+ def getNat(at: Addr): Int = getLongNat(at).toInt
+
+ /** The long natural number at address `at` */
+ def getLongNat(at: Addr): Long = {
+ var b = 0L
+ var x = 0L
+ var idx = at.index
+ do {
+ b = bytes(idx)
+ x = (x << 7) | (b & 0x7f)
+ idx += 1
+ } while ((b & 0x80) == 0) // loop until the stop digit (bit 0x80 set)
+ x
+ }
+
+ /** The address (represented as a natural number) at address `at` */
+ def getAddr(at: Addr) = Addr(getNat(at))
+
+ /** The smallest address equal to or following `at` which points to a non-zero byte */
+ final def skipZeroes(at: Addr): Addr =
+ if (getByte(at) != 0) at else skipZeroes(at + 1)
+
+ /** The address after the natural number found at address `at`. */
+ final def skipNat(at: Addr): Addr = {
+ val next = at + 1
+ if ((getByte(at) & 0x80) != 0) next else skipNat(next) // stop digit has bit 0x80 set
+ }
+
+ /** The address referring to the end of data written so far */
+ def currentAddr: Addr = Addr(length)
+
+ /** Reserve `AddrWidth` bytes to write an address into */
+ def reserveAddr(): Addr = {
+ val result = currentAddr
+ length += AddrWidth
+ result
+ }
+
+ /** Fill reserved space at address `at` with address `target` */
+ def fillAddr(at: Addr, target: Addr) =
+ putNat(at, target.index, AddrWidth)
+
+ /** Write address without leading zeroes */
+ def writeAddr(addr: Addr): Unit = writeNat(addr.index)
+
+ // -- Finalization --------------------------------------------
+
+ /** Hook to be overridden in subclasses.
+ * Perform all actions necessary to assemble the final byte array.
+ * After `assemble` no more output actions to this buffer are permitted.
+ */
+ def assemble(): Unit = ()
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
new file mode 100644
index 000000000..106a6510d
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -0,0 +1,491 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+/************************************************************
+Notation:
+
+We use BNF notation. Terminal symbols start with at least two
+consecutive upper case letters. Each terminal is represented as a
+single byte tag. Non-terminals are mixed case. Prefixes of the form
+lower case letter*_ are for explanation of semantic content only, they
+can be dropped without changing the grammar.
+
+Micro-syntax:
+
+ LongInt = Digit* StopDigit // big endian 2's complement, value fits in a Long w/o overflow
+ Int = LongInt // big endian 2's complement, fits in an Int w/o overflow
+ Nat = LongInt // non-negative value, fits in an Int without overflow
+ Digit = 0 | ... | 127
+ StopDigit = 128 | ... | 255 // value = digit - 128
+
+Macro-format:
+
+ File = Header majorVersion_Nat minorVersion_Nat UUID
+ nameTable_Length Name* Section*
+ Header = 0x5CA1AB1F
+ UUID = Byte*16 // random UUID
+
+ Section = NameRef Length Bytes
+ Length = Nat // length of rest of entry in bytes
+
+ Name = UTF8 Length UTF8-CodePoint*
+ QUALIFIED Length qualified_NameRef selector_NameRef
+ SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef*
+ EXPANDED Length prefix_NameRef original_NameRef
+ OBJECTCLASS Length module_NameRef
+ SUPERACCESSOR Length accessed_NameRef
+ DEFAULTGETTER Length method_NameRef paramNumber_Nat
+ SHADOWED Length original_NameRef
+ MANGLED Length mangle_NameRef name_NameRef
+ ...
+
+ NameRef = Nat // ordinal number of name in name table. NOTE(review): NameBuffer and TastyName.Table index from 0 — "starting from 1" looked inconsistent with the implementation; confirm the intended base.
+
+Note: Unqualified names in the name table are strings. The context decides whether a name is
+a type-name or a term-name. The same string can represent both.
+
+Standard-Section: "ASTs" TopLevelStat*
+
+ TopLevelStat = PACKAGE Length Path TopLevelStat*
+ Stat
+
+ Stat = Term
+ VALDEF Length NameRef Type rhs_Term? Modifier*
+ DEFDEF Length NameRef TypeParam* Params* return_Type rhs_Term?
+ Modifier*
+ TYPEDEF Length NameRef (Type | Template) Modifier*
+ IMPORT Length qual_Term Selector*
+ Selector = IMPORTED name_NameRef
+ RENAMED Length from_NameRef to_NameRef
+ // Imports are for scala.meta, they are not used in the backend
+
+ TypeParam = TYPEPARAM Length NameRef Type Modifier*
+ Params = PARAMS Length Param*
+ Param = PARAM Length NameRef Type rhs_Term? Modifier* // rhs_Term is present in the case of an aliased class parameter
+ Template = TEMPLATE Length TypeParam* Param* Parent* Self? Stat* // Stat* always starts with the primary constructor.
+ Parent = Application
+ Type
+ Self = SELFDEF selfName_NameRef selfType_Type
+
+ Term = Path
+ Application
+ IDENT NameRef Type // used when ident’s type is not a TermRef
+ SELECT possiblySigned_NameRef qual_Term
+ NEW cls_Type
+ SUPER Length this_Term mixinTrait_Type?
+ PAIR Length left_Term right_Term
+ TYPED Length expr_Term ascription_Type
+ NAMEDARG Length paramName_NameRef arg_Term
+ ASSIGN Length lhs_Term rhs_Term
+ BLOCK Length expr_Term Stat*
+ LAMBDA Length meth_Term target_Type
+ IF Length cond_Term then_Term else_Term
+ MATCH Length sel_Term CaseDef*
+ TRY Length expr_Term CaseDef* finalizer_Term?
+ RETURN Length meth_ASTRef expr_Term?
+ REPEATED Length elem_Term*
+ BIND Length boundName_NameRef patType_Type pat_Term
+ ALTERNATIVE Length alt_Term*
+ UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term*
+ EMPTYTREE
+ SHARED term_ASTRef
+ Application = APPLY Length fn_Term arg_Term*
+
+ TYPEAPPLY Length fn_Term arg_Type*
+ CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree?
+ ImplicitArg = IMPLICITARG arg_Term
+ ASTRef = Nat // byte position in AST payload
+
+ Path = Constant
+ TERMREFdirect sym_ASTRef
+ TERMREFsymbol sym_ASTRef qual_Type
+ TERMREFpkg fullyQualified_NameRef
+ TERMREF possiblySigned_NameRef qual_Type
+ THIS clsRef_Type
+ SKOLEMtype refinedType_ASTRef
+ SHARED path_ASTRef
+
+
+ Constant = UNITconst
+ FALSEconst
+ TRUEconst
+ BYTEconst Int
+ SHORTconst Int
+ CHARconst Nat
+ INTconst Int
+ LONGconst LongInt
+ FLOATconst Int
+ DOUBLEconst LongInt
+ STRINGconst NameRef
+ NULLconst
+ CLASSconst Type
+ ENUMconst Path
+
+ Type = Path
+ TYPEREFdirect sym_ASTRef
+ TYPEREFsymbol sym_ASTRef qual_Type
+ TYPEREFpkg fullyQualified_NameRef
+ TYPEREF possiblySigned_NameRef qual_Type
+ SUPERtype Length this_Type underlying_Type
+ REFINEDtype Length underlying_Type refinement_NameRef info_Type
+ APPLIEDtype Length tycon_Type arg_Type*
+ TYPEBOUNDS Length low_Type high_Type
+ TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)?
+ ANNOTATED Length fullAnnotation_Term underlying_Type
+ ANDtype Length left_Type right_Type
+ ORtype Length left_Type right_Type
+ BIND Length boundName_NameRef bounds_Type
+ // for type-variables defined in a type pattern
+ BYNAMEtype underlying_Type
+ POLYtype Length result_Type NamesTypes // needed for refinements
+ METHODtype Length result_Type NamesTypes // needed for refinements
+ PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements
+ SHARED type_ASTRef
+ NamesTypes = ParamType*
+ NameType = paramName_NameRef typeOrBounds_ASTRef
+
+ Modifier = PRIVATE
+ INTERNAL // package private
+ PROTECTED
+ PRIVATEqualified qualifier_Type // will be dropped
+ PROTECTEDqualified qualifier_Type // will be dropped
+ ABSTRACT
+ FINAL
+ SEALED
+ CASE
+ IMPLICIT
+ LAZY
+ OVERRIDE
+ INLINE // macro
+ ABSOVERRIDE // abstract override
+ STATIC // mapped to static Java member
+ OBJECT // an object or its class
+ TRAIT // a trait
+ LOCAL // private[this] or protected[this]
+ SYNTHETIC // generated by Scala compiler
+ ARTIFACT // to be tagged Java Synthetic
+ MUTABLE // a var
+ LABEL // method generated as a label
+ FIELDaccessor // getter or setter
+ CASEaccessor // getter for case class param
+ COVARIANT // type param marked “+”
+ CONTRAVARIANT // type param marked “-”
+ SCALA2X // Imported from Scala2.x
+ DEFAULTparameterized // Method with default params
+ INSUPERCALL // defined in the argument of a constructor supercall
+ Annotation
+ Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term
+
+Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way.
+
+ Category 1 (tags 0-63) : tag
+ Category 2 (tags 64-95) : tag Nat
+ Category 3 (tags 96-111) : tag AST
+ Category 4 (tags 112-127): tag Nat AST
+ Category 5 (tags 128-255): tag Length <payload>
+
+Standard Section: "Sourcefile" sourcefile_NameRef
+
+Standard Section: "Positions" sourceLength_Nat Assoc*
+
+ Assoc = addr_Delta offset_Delta offset_Delta?
+ // addr_Delta :
+ // Difference of address to last recorded node.
+ // All but the first addr_Deltas are > 0, the first is >= 0.
+ // 2nd offset_Delta:
+ // Difference of end offset of addressed node vs parent node. Always <= 0
+ // 1st offset Delta, if delta >= 0 or 2nd offset delta exists
+ // Difference of start offset of addressed node vs parent node.
+ // 1st offset Delta, if delta < 0 and 2nd offset delta does not exist:
+ // Difference of end offset of addressed node vs parent node.
+ // Offsets and addresses are difference encoded.
+ // Nodes which have the same positions as their parents are omitted.
+ Delta = Int // Difference between consecutive offsets / tree addresses,
+
+**************************************************************************************/
+
+object TastyFormat {
+
+ final val header = Array(0x5C, 0xA1, 0xAB, 0x1F) // magic bytes 0x5CA1AB1F, see Header in the format spec above
+ final val MajorVersion = 0
+ final val MinorVersion = 5
+
+ // Name tags
+
+ final val UTF8 = 1
+ final val QUALIFIED = 2
+ final val SIGNED = 3
+ final val EXPANDED = 4
+ final val OBJECTCLASS = 5
+ final val SUPERACCESSOR = 6
+ final val DEFAULTGETTER = 7
+ final val SHADOWED = 8 // NOTE(review): nameTagToString below has no case for this tag
+
+// AST tags
+
+ final val UNITconst = 2 // Category 1 (tags 0-63): tag only
+ final val FALSEconst = 3
+ final val TRUEconst = 4
+ final val NULLconst = 5
+ final val PRIVATE = 6
+ final val INTERNAL = 7
+ final val PROTECTED = 8
+ final val ABSTRACT = 9
+ final val FINAL = 10
+ final val SEALED = 11
+ final val CASE = 12
+ final val IMPLICIT = 13
+ final val LAZY = 14
+ final val OVERRIDE = 15
+ final val INLINE = 16
+ final val ABSOVERRIDE = 17
+ final val STATIC = 18
+ final val OBJECT = 19
+ final val TRAIT = 20
+ final val LOCAL = 21
+ final val SYNTHETIC = 22
+ final val ARTIFACT = 23
+ final val MUTABLE = 24
+ final val LABEL = 25
+ final val FIELDaccessor = 26
+ final val CASEaccessor = 27
+ final val COVARIANT = 28
+ final val CONTRAVARIANT = 29
+ final val SCALA2X = 30
+ final val DEFAULTparameterized = 31
+ final val INSUPERCALL = 32
+
+ final val SHARED = 64 // Category 2 (tags 64-95): tag Nat
+ final val TERMREFdirect = 65
+ final val TYPEREFdirect = 66
+ final val TERMREFpkg = 67
+ final val TYPEREFpkg = 68
+ final val SKOLEMtype = 69
+ final val BYTEconst = 70
+ final val SHORTconst = 71
+ final val CHARconst = 72
+ final val INTconst = 73
+ final val LONGconst = 74
+ final val FLOATconst = 75
+ final val DOUBLEconst = 76
+ final val STRINGconst = 77
+ final val IMPORTED = 78
+
+ final val THIS = 96 // Category 3 (tags 96-111): tag AST
+ final val CLASSconst = 97
+ final val ENUMconst = 98
+ final val BYNAMEtype = 99
+ final val NEW = 100
+ final val IMPLICITarg = 101
+ final val PRIVATEqualified = 102
+ final val PROTECTEDqualified = 103
+
+ final val IDENT = 112 // Category 4 (tags 112-127): tag Nat AST
+ final val SELECT = 113
+ final val TERMREFsymbol = 114
+ final val TERMREF = 115
+ final val TYPEREFsymbol = 116
+ final val TYPEREF = 117
+ final val SELFDEF = 118
+
+ final val PACKAGE = 128 // Category 5 (tags 128-255): tag Length <payload>
+ final val VALDEF = 129
+ final val DEFDEF = 130
+ final val TYPEDEF = 131
+ final val IMPORT = 132
+ final val TYPEPARAM = 133
+ final val PARAMS = 134
+ final val PARAM = 136
+ final val RENAMED = 138
+ final val APPLY = 139
+ final val TYPEAPPLY = 140
+ final val PAIR = 142
+ final val TYPED = 143
+ final val NAMEDARG = 144
+ final val ASSIGN = 145
+ final val BLOCK = 146
+ final val IF = 147
+ final val LAMBDA = 148
+ final val MATCH = 149
+ final val RETURN = 150
+ final val TRY = 151
+ final val REPEATED = 153
+ final val BIND = 154
+ final val ALTERNATIVE = 155
+ final val UNAPPLY = 156
+ final val ANNOTATED = 157
+ final val CASEDEF = 158
+ final val TEMPLATE = 160
+ final val SUPER = 163
+ final val SUPERtype = 166
+ final val REFINEDtype = 167
+ final val APPLIEDtype = 168
+ final val TYPEBOUNDS = 169
+ final val TYPEALIAS = 170
+ final val ANDtype = 171
+ final val ORtype = 172
+ final val METHODtype = 174
+ final val POLYtype = 175
+ final val PARAMtype = 176
+ final val ANNOTATION = 178
+
+ final val firstSimpleTreeTag = UNITconst // category boundaries, used to compute tree sizes generically
+ final val firstNatTreeTag = SHARED
+ final val firstASTTreeTag = THIS
+ final val firstNatASTTreeTag = IDENT
+ final val firstLengthTreeTag = PACKAGE
+
+ def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM
+
+ def isModifierTag(tag: Int) = tag match { // tags that may trail a definition as modifiers (incl. qualified access and annotations)
+ case PRIVATE
+ | INTERNAL
+ | PROTECTED
+ | ABSTRACT
+ | FINAL
+ | SEALED
+ | CASE
+ | IMPLICIT
+ | LAZY
+ | OVERRIDE
+ | INLINE
+ | ABSOVERRIDE
+ | STATIC
+ | OBJECT
+ | TRAIT
+ | LOCAL
+ | SYNTHETIC
+ | ARTIFACT
+ | MUTABLE
+ | LABEL
+ | FIELDaccessor
+ | CASEaccessor
+ | COVARIANT
+ | CONTRAVARIANT
+ | SCALA2X
+ | DEFAULTparameterized
+ | INSUPERCALL
+ | ANNOTATION
+ | PRIVATEqualified
+ | PROTECTEDqualified => true
+ case _ => false
+ }
+
+ def nameTagToString(tag: Int): String = tag match { // NOTE(review): non-exhaustive — no case for SHADOWED (tag 8), so printing a shadowed name throws MatchError
+ case UTF8 => "UTF8"
+ case QUALIFIED => "QUALIFIED"
+ case SIGNED => "SIGNED"
+ case EXPANDED => "EXPANDED"
+ case OBJECTCLASS => "OBJECTCLASS"
+ case SUPERACCESSOR => "SUPERACCESSOR"
+ case DEFAULTGETTER => "DEFAULTGETTER"
+ }
+
+ def astTagToString(tag: Int): String = tag match { // printable names for AST tags
+ case UNITconst => "UNITconst"
+ case FALSEconst => "FALSEconst"
+ case TRUEconst => "TRUEconst"
+ case NULLconst => "NULLconst"
+ case PRIVATE => "PRIVATE"
+ case INTERNAL => "INTERNAL"
+ case PROTECTED => "PROTECTED"
+ case ABSTRACT => "ABSTRACT"
+ case FINAL => "FINAL"
+ case SEALED => "SEALED"
+ case CASE => "CASE"
+ case IMPLICIT => "IMPLICIT"
+ case LAZY => "LAZY"
+ case OVERRIDE => "OVERRIDE"
+ case INLINE => "INLINE"
+ case ABSOVERRIDE => "ABSOVERRIDE"
+ case STATIC => "STATIC"
+ case OBJECT => "OBJECT"
+ case TRAIT => "TRAIT"
+ case LOCAL => "LOCAL"
+ case SYNTHETIC => "SYNTHETIC"
+ case ARTIFACT => "ARTIFACT"
+ case MUTABLE => "MUTABLE"
+ case LABEL => "LABEL"
+ case FIELDaccessor => "FIELDaccessor"
+ case CASEaccessor => "CASEaccessor"
+ case COVARIANT => "COVARIANT"
+ case CONTRAVARIANT => "CONTRAVARIANT"
+ case SCALA2X => "SCALA2X"
+ case DEFAULTparameterized => "DEFAULTparameterized"
+ case INSUPERCALL => "INSUPERCALL"
+
+ case SHARED => "SHARED"
+ case TERMREFdirect => "TERMREFdirect"
+ case TYPEREFdirect => "TYPEREFdirect"
+ case TERMREFpkg => "TERMREFpkg"
+ case TYPEREFpkg => "TYPEREFpkg"
+ case SKOLEMtype => "SKOLEMtype"
+ case BYTEconst => "BYTEconst"
+ case SHORTconst => "SHORTconst"
+ case CHARconst => "CHARconst"
+ case INTconst => "INTconst"
+ case LONGconst => "LONGconst"
+ case FLOATconst => "FLOATconst"
+ case DOUBLEconst => "DOUBLEconst"
+ case STRINGconst => "STRINGconst"
+
+ case IDENT => "IDENT"
+ case SELECT => "SELECT"
+ case TERMREFsymbol => "TERMREFsymbol"
+ case TERMREF => "TERMREF"
+ case TYPEREFsymbol => "TYPEREFsymbol"
+ case TYPEREF => "TYPEREF"
+
+ case PACKAGE => "PACKAGE"
+ case VALDEF => "VALDEF"
+ case DEFDEF => "DEFDEF"
+ case TYPEDEF => "TYPEDEF"
+ case IMPORT => "IMPORT"
+ case TYPEPARAM => "TYPEPARAM"
+ case PARAMS => "PARAMS"
+ case PARAM => "PARAM"
+ case IMPORTED => "IMPORTED"
+ case RENAMED => "RENAMED"
+ case APPLY => "APPLY"
+ case TYPEAPPLY => "TYPEAPPLY"
+ case NEW => "NEW"
+ case PAIR => "PAIR"
+ case TYPED => "TYPED"
+ case NAMEDARG => "NAMEDARG"
+ case ASSIGN => "ASSIGN"
+ case BLOCK => "BLOCK"
+ case IF => "IF"
+ case LAMBDA => "LAMBDA"
+ case MATCH => "MATCH"
+ case RETURN => "RETURN"
+ case TRY => "TRY"
+ case REPEATED => "REPEATED"
+ case BIND => "BIND"
+ case ALTERNATIVE => "ALTERNATIVE"
+ case UNAPPLY => "UNAPPLY"
+ case ANNOTATED => "ANNOTATED"
+ case CASEDEF => "CASEDEF"
+ case IMPLICITarg => "IMPLICITarg"
+ case TEMPLATE => "TEMPLATE"
+ case SELFDEF => "SELFDEF"
+ case THIS => "THIS"
+ case SUPER => "SUPER"
+ case CLASSconst => "CLASSconst"
+ case ENUMconst => "ENUMconst"
+ case SUPERtype => "SUPERtype"
+ case REFINEDtype => "REFINEDtype"
+ case APPLIEDtype => "APPLIEDtype"
+ case TYPEBOUNDS => "TYPEBOUNDS"
+ case TYPEALIAS => "TYPEALIAS"
+ case ANDtype => "ANDtype"
+ case ORtype => "ORtype"
+ case BYNAMEtype => "BYNAMEtype"
+ case POLYtype => "POLYtype"
+ case METHODtype => "METHODtype"
+ case PARAMtype => "PARAMtype"
+ case ANNOTATION => "ANNOTATION"
+ case PRIVATEqualified => "PRIVATEqualified"
+ case PROTECTEDqualified => "PROTECTEDqualified"
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyName.scala b/src/dotty/tools/dotc/core/tasty/TastyName.scala
new file mode 100644
index 000000000..6d37c8bca
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyName.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import core.Names.TermName
+import collection.mutable
+
+/** A name as represented in the TASTY name table.
+ * All variants other than `Simple` refer to other table entries through `NameRef` indices,
+ * so a name is fully resolved only relative to a `Table`.
+ */
+abstract class TastyName
+
+object TastyName {
+
+ // An index into the name table; extends AnyVal so references stay unboxed.
+ case class NameRef(val index: Int) extends AnyVal
+
+ case class Simple(name: TermName) extends TastyName
+ case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName
+ case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
+ case class Expanded(prefix: NameRef, original: NameRef) extends TastyName
+ case class ModuleClass(module: NameRef) extends TastyName
+ case class SuperAccessor(accessed: NameRef) extends TastyName
+ case class DefaultGetter(method: NameRef, num: Int) extends TastyName
+ case class Shadowed(original: NameRef) extends TastyName
+
+ // Append-only index of names; also usable directly as a NameRef => TastyName function.
+ class Table extends (NameRef => TastyName) {
+ private val names = new mutable.ArrayBuffer[TastyName]
+ def add(name: TastyName) = names += name
+ def apply(ref: NameRef) = names(ref.index)
+ def contents: Iterable[TastyName] = names
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
new file mode 100644
index 000000000..f53a97c0c
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -0,0 +1,70 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyFormat._
+import collection.mutable
+import TastyBuffer._
+import java.util.UUID
+import core.Symbols.Symbol
+import ast.tpd
+
+/** Assembles a complete TASTY file from a header, a name table, and named sections. */
+class TastyPickler {
+
+ // Registered (section-name-ref, section-buffer) pairs, in registration order.
+ private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)]
+
+ // Header = magic bytes, major/minor version, and a fresh random UUID (16 bytes).
+ private val headerBuffer = {
+ val buf = new TastyBuffer(24)
+ for (ch <- header) buf.writeByte(ch.toByte)
+ buf.writeNat(MajorVersion)
+ buf.writeNat(MinorVersion)
+ val uuid = UUID.randomUUID()
+ buf.writeUncompressedLong(uuid.getMostSignificantBits)
+ buf.writeUncompressedLong(uuid.getLeastSignificantBits)
+ buf
+ }
+
+ val nameBuffer = new NameBuffer
+
+ /** Register `buf` as the contents of the section called `name`. */
+ def newSection(name: String, buf: TastyBuffer) =
+ sections += ((nameBuffer.nameIndex(name), buf))
+
+ /** Concatenate header, length-prefixed name table, and all length-prefixed sections
+ * into a single byte array of exactly the computed total size.
+ */
+ def assembleParts(): Array[Byte] = {
+ // A section occupies its assembled length plus the size of its length prefix.
+ def lengthWithLength(buf: TastyBuffer) = {
+ buf.assemble()
+ buf.length + natSize(buf.length)
+ }
+ val totalSize =
+ headerBuffer.length +
+ lengthWithLength(nameBuffer) + {
+ for ((nameRef, buf) <- sections) yield
+ natSize(nameRef.index) + lengthWithLength(buf)
+ }.sum
+ val all = new TastyBuffer(totalSize)
+ all.writeBytes(headerBuffer.bytes, headerBuffer.length)
+ all.writeNat(nameBuffer.length)
+ all.writeBytes(nameBuffer.bytes, nameBuffer.length)
+ for ((nameRef, buf) <- sections) {
+ all.writeNat(nameRef.index)
+ all.writeNat(buf.length)
+ all.writeBytes(buf.bytes, buf.length)
+ }
+ assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}")
+ all.bytes
+ }
+
+ /**
+ * Addresses in TASTY file of trees, stored by pickling.
+ * Note that trees are checked for reference equality,
+ * so one can reliably use this function only directly after `pickler`
+ */
+ var addrOfTree: tpd.Tree => Option[Addr] = (_ => None)
+
+ /**
+ * Addresses in TASTY file of symbols, stored by pickling.
+ * Note that symbols are checked for reference equality,
+ * so one can reliably use this function only directly after `pickler`
+ */
+ var addrOfSym: Symbol => Option[Addr] = (_ => None)
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
new file mode 100644
index 000000000..915ae3f21
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -0,0 +1,122 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import Contexts._, Decorators._
+import printing.Texts._
+import TastyName._
+import StdNames._
+import TastyUnpickler._
+import TastyBuffer.Addr
+import util.Positions.{Position, offsetToInt}
+import collection.mutable
+
+/** Debug printer: decodes a TASTY byte array and dumps its name table,
+ * AST section, and position section to standard output.
+ */
+class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
+
+ val unpickler = new TastyUnpickler(bytes)
+ import unpickler.{tastyName, unpickle}
+
+ /** Render a table name, resolving nested NameRefs recursively. */
+ def nameToString(name: TastyName): String = name match {
+ case Simple(name) => name.toString
+ case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name)
+ case Signed(original, params, result) =>
+ i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}"
+ // NOTE(review): unlike the other cases this interpolates the raw NameRefs
+ // (printing e.g. "NameRef(3)"), not nameRefToString — confirm whether intended.
+ case Expanded(prefix, original) => s"$prefix${nme.EXPAND_SEPARATOR}$original"
+ case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS"
+ case SuperAccessor(accessed) => nameRefToString(accessed) + "/SUPERACCESSOR"
+ case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num
+ case Shadowed(original) => nameRefToString(original) + "/SHADOWED"
+ }
+
+ def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref))
+
+ /** Print every table entry with its index. */
+ def printNames() =
+ for ((name, idx) <- tastyName.contents.zipWithIndex)
+ println(f"$idx%4d: " + nameToString(name))
+
+ def printContents(): Unit = {
+ println("Names:")
+ printNames()
+ println("Trees:")
+ unpickle(new TreeSectionUnpickler)
+ unpickle(new PositionSectionUnpickler)
+ }
+
+ /** Dumps the "ASTs" section, one tag per line, indented by tree depth. */
+ class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") {
+ import TastyFormat._
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+ import reader._
+ var indent = 0
+ def newLine() = print(f"\n ${index(currentAddr) - index(startAddr)}%5d:" + " " * indent)
+ def printNat() = print(" " + readNat())
+ // Prints a name operand as: <index>[<resolved name>]
+ def printName() = {
+ val idx = readNat()
+ print(" ") ;print(idx); print("["); print(nameRefToString(NameRef(idx))); print("]")
+ }
+ def printTree(): Unit = {
+ newLine()
+ val tag = readByte()
+ print(" ");print(astTagToString(tag))
+ indent += 2
+ // Dispatch on the tag category, mirroring the TASTY grammar:
+ // length-prefixed trees, Nat-plus-AST trees, AST trees, Nat trees.
+ if (tag >= firstLengthTreeTag) {
+ val len = readNat()
+ print(s"($len)")
+ val end = currentAddr + len
+ def printTrees() = until(end)(printTree())
+ tag match {
+ case RENAMED =>
+ printName(); printName()
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
+ printName(); printTrees()
+ case REFINEDtype =>
+ printTree(); printName(); printTrees()
+ case RETURN =>
+ printNat(); printTrees()
+ case METHODtype | POLYtype =>
+ printTree()
+ until(end) { printName(); printTree() }
+ case PARAMtype =>
+ printNat(); printNat()
+ case _ =>
+ printTrees()
+ }
+ // Resynchronize at the announced end if the operands did not consume
+ // exactly `len` bytes, so a bad node cannot derail the rest of the dump.
+ if (currentAddr != end) {
+ println(s"incomplete read, current = $currentAddr, end = $end")
+ goto(end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) {
+ tag match {
+ case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName()
+ case _ => printNat()
+ }
+ printTree()
+ }
+ else if (tag >= firstASTTreeTag)
+ printTree()
+ else if (tag >= firstNatTreeTag)
+ tag match {
+ case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName()
+ case _ => printNat()
+ }
+ indent -= 2
+ }
+ println(i"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr")
+ println(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr")
+ while (!isAtEnd) {
+ printTree()
+ newLine()
+ }
+ }
+ }
+
+ /** Dumps the "Positions" section as sorted (address, position) pairs. */
+ class PositionSectionUnpickler extends SectionUnpickler[Unit]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+ print(s"${reader.endAddr.index - reader.currentAddr.index}")
+ val (totalRange, positions) = new PositionUnpickler(reader).unpickle()
+ println(s" position bytes in $totalRange:")
+ val sorted = positions.toSeq.sortBy(_._1.index)
+ for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}")
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyReader.scala b/src/dotty/tools/dotc/core/tasty/TastyReader.scala
new file mode 100644
index 000000000..311097148
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyReader.scala
@@ -0,0 +1,141 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyBuffer._
+import TastyName.NameRef
+import collection.mutable
+
+/** A reader over a slice of a byte array containing data in TASTY format.
+ * Supports reading bytes as well as natural numbers and integers in the
+ * base-128 variable-length encoding, and addresses represented as natural numbers.
+ *
+ * @param bytes The array containing data
+ * @param start The position from which to read
+ * @param end The position one greater than the last byte to be read
+ * @param base The index referenced by the logical zero address Addr(0)
+ */
+class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) {
+
+ def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length)
+
+ private var bp: Int = start
+
+ def addr(idx: Int) = Addr(idx - base)
+ def index(addr: Addr) = addr.index + base
+
+ /** The address of the first byte to read, respectively byte that was read */
+ def startAddr: Addr = addr(start)
+
+ /** The address of the next byte to read */
+ def currentAddr: Addr = addr(bp)
+
+ /** The address one greater than the last byte to read */
+ def endAddr: Addr = addr(end)
+
+ /** Have all bytes been read? */
+ def isAtEnd: Boolean = bp == end
+
+ /** A new reader over the same array with the same address base, but with
+ * specified start and end positions
+ */
+ def subReader(start: Addr, end: Addr): TastyReader =
+ new TastyReader(bytes, index(start), index(end), base)
+
+ /** Read a byte of data. */
+ def readByte(): Int = {
+ val result = bytes(bp) & 0xff
+ bp += 1
+ result
+ }
+
+ /** Returns the next byte of data as a natural number without advancing the read position */
+ def nextByte: Int = bytes(bp) & 0xff
+
+ /** Read the next `n` bytes of `data`. */
+ def readBytes(n: Int): Array[Byte] = {
+ val result = new Array[Byte](n)
+ Array.copy(bytes, bp, result, 0, n)
+ bp += n
+ result
+ }
+
+ /** Read a natural number fitting in an Int in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def readNat(): Int = readLongNat.toInt
+
+ /** Read an integer number in 2's complement big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def readInt(): Int = readLongInt.toInt
+
+ /** Read a natural number fitting in a Long in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def readLongNat(): Long = {
+ var b = 0L
+ var x = 0L
+ do {
+ b = bytes(bp)
+ x = (x << 7) | (b & 0x7f)
+ bp += 1
+ } while ((b & 0x80) == 0)
+ x
+ }
+
+ /** Read a long integer number in 2's complement big endian format, base 128. */
+ def readLongInt(): Long = {
+ var b = bytes(bp)
+ var x: Long = (b << 1).toByte >> 1 // sign extend with bit 6.
+ bp += 1
+ while ((b & 0x80) == 0) {
+ b = bytes(bp)
+ x = (x << 7) | (b & 0x7f)
+ bp += 1
+ }
+ x
+ }
+
+ /** Read an uncompressed Long stored in 8 bytes in big endian format */
+ def readUncompressedLong(): Long = {
+ var x = 0L // must accumulate in a Long; an Int would shift out the upper 4 bytes
+ for (i <- 0 to 7)
+ x = (x << 8) | (readByte() & 0xff)
+ x
+ }
+
+ /** Read a natural number and return as a NameRef */
+ def readNameRef() = NameRef(readNat())
+
+ /** Read a natural number and return as an address */
+ def readAddr() = Addr(readNat())
+
+ /** Read a length number and return the absolute end address implied by it,
+ * given as <address following length field> + <length-value-read>.
+ */
+ def readEnd(): Addr = addr(readNat() + bp)
+
+ /** Set read position to the one pointed to by `addr` */
+ def goto(addr: Addr): Unit =
+ bp = index(addr)
+
+ /** Perform `op` until `end` address is reached and collect results in a list. */
+ def until[T](end: Addr)(op: => T): List[T] = {
+ val buf = new mutable.ListBuffer[T]
+ while (bp < index(end)) buf += op
+ assert(bp == index(end))
+ buf.toList
+ }
+
+ /** If before given `end` address, the result of `op`, otherwise `default` */
+ def ifBefore[T](end: Addr)(op: => T, default: T): T =
+ if (bp < index(end)) op else default
+
+ /** Perform `op` while condition `cond` holds and collect results in a list. */
+ def collectWhile[T](cond: => Boolean)(op: => T): List[T] = {
+ val buf = new mutable.ListBuffer[T]
+ while (cond) buf += op
+ buf.toList
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
new file mode 100644
index 000000000..8a1f58acd
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
@@ -0,0 +1,95 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import scala.collection.mutable
+import TastyFormat._
+import Names.{Name, termName}
+import java.util.UUID
+
+object TastyUnpickler {
+ class UnpickleException(msg: String) extends Exception(msg)
+
+ /** A strategy for decoding one named section of a TASTY file. */
+ abstract class SectionUnpickler[R](val name: String) {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): R
+ }
+}
+
+import TastyUnpickler._
+
+/** Parses a TASTY file's envelope at construction time: verifies the header,
+ * reads the name table into `tastyName`, and indexes each section by name
+ * so it can later be decoded on demand via `unpickle`.
+ */
+class TastyUnpickler(reader: TastyReader) {
+ import reader._
+
+ def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
+
+ private val sectionReader = new mutable.HashMap[String, TastyReader]
+ val tastyName = new TastyName.Table
+
+ def check(cond: Boolean, msg: => String) =
+ if (!cond) throw new UnpickleException(msg)
+
+ // Reads a name reference and resolves it; the referenced entry is expected
+ // to be a Simple name (the match throws otherwise).
+ def readString(): String = {
+ val TastyName.Simple(name) = tastyName(readNameRef())
+ name.toString
+ }
+
+ /** Read one length-prefixed name-table entry, dispatching on its tag byte. */
+ def readName(): TastyName = {
+ import TastyName._
+ val tag = readByte()
+ val length = readNat()
+ val start = currentAddr
+ val end = start + length
+ val result = tag match {
+ case UTF8 =>
+ goto(end)
+ Simple(termName(bytes, start.index, length))
+ case QUALIFIED =>
+ Qualified(readNameRef(), readNameRef())
+ case SIGNED =>
+ val original = readNameRef()
+ val result = readNameRef()
+ val params = until(end)(readNameRef())
+ Signed(original, params, result)
+ case EXPANDED =>
+ Expanded(readNameRef(), readNameRef())
+ case OBJECTCLASS =>
+ ModuleClass(readNameRef())
+ case SUPERACCESSOR =>
+ SuperAccessor(readNameRef())
+ case DEFAULTGETTER =>
+ DefaultGetter(readNameRef(), readNat())
+ case SHADOWED =>
+ Shadowed(readNameRef())
+ }
+ assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
+ result
+ }
+
+ // Checks the magic bytes and version compatibility (same major, minor not newer),
+ // then returns the file's UUID.
+ private def readHeader(): UUID = {
+ for (i <- 0 until header.length)
+ check(readByte() == header(i), "not a TASTy file")
+ val major = readNat()
+ val minor = readNat()
+ check(major == MajorVersion && minor <= MinorVersion,
+ s"""TASTy signature has wrong version.
+ | expected: $MajorVersion.$MinorVersion
+ | found : $major.$minor""".stripMargin)
+ new UUID(readUncompressedLong(), readUncompressedLong())
+ }
+
+ val uuid = readHeader()
+
+ // Constructor-time parsing: name table first, then index each remaining
+ // section with a sub-reader whose address base is the section start.
+ locally {
+ until(readEnd()) { tastyName.add(readName()) }
+ while (!isAtEnd) {
+ val secName = readString()
+ val secEnd = readEnd()
+ sectionReader(secName) = new TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index)
+ goto(secEnd)
+ }
+ }
+
+ /** Decode the section `sec` names, or None if the file lacks that section. */
+ def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
+ for (reader <- sectionReader.get(sec.name)) yield
+ sec.unpickle(reader, tastyName)
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
new file mode 100644
index 000000000..6aba44d3b
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
@@ -0,0 +1,179 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.{bestFit, dble}
+import TastyBuffer.{Addr, AddrWidth}
+import config.Printers.pickling
+import ast.tpd.Tree
+
+/** A TastyBuffer specialized for pickled trees. Tracks reserved address fields
+ * (absolute and relative) so that, after writing, `compactify` can shrink
+ * over-allocated address fields and shift the buffer contents accordingly.
+ */
+class TreeBuffer extends TastyBuffer(50000) {
+
+ private final val ItemsOverOffsets = 2
+ private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
+ // Buffer positions of reserved address fields, parallel with `isRelative`.
+ private var offsets = new Array[Int](initialOffsetSize)
+ private var isRelative = new Array[Boolean](initialOffsetSize)
+ private var delta: Array[Int] = _
+ private var numOffsets = 0
+
+ private[tasty] val pickledTrees = new java.util.IdentityHashMap[Tree, Any] // Value type is really Addr, but that's not compatible with null
+
+ def addrOfTree(tree: Tree): Option[Addr] = pickledTrees.get(tree) match {
+ case null => None
+ case n => Some(n.asInstanceOf[Addr])
+ }
+
+ private def offset(i: Int): Addr = Addr(offsets(i))
+
+ // Record the current write position as an address field, growing both
+ // parallel arrays if full.
+ private def keepOffset(relative: Boolean): Unit = {
+ if (numOffsets == offsets.length) {
+ offsets = dble(offsets)
+ isRelative = dble(isRelative)
+ }
+ offsets(numOffsets) = length
+ isRelative(numOffsets) = relative
+ numOffsets += 1
+ }
+
+ /** Reserve space for a reference, to be adjusted later */
+ def reserveRef(relative: Boolean): Addr = {
+ val addr = currentAddr
+ keepOffset(relative)
+ reserveAddr()
+ addr
+ }
+
+ /** Write reference right adjusted into freshly reserved field. */
+ def writeRef(target: Addr) = {
+ keepOffset(relative = false)
+ fillAddr(reserveAddr(), target)
+ }
+
+ /** Fill previously reserved field with a reference */
+ def fillRef(at: Addr, target: Addr, relative: Boolean) = {
+ val addr = if (relative) target.relativeTo(at) else target
+ fillAddr(at, addr)
+ }
+
+ /** The amount by which the bytes at the given address are shifted under compression */
+ def deltaAt(at: Addr): Int = {
+ val idx = bestFit(offsets, numOffsets, at.index - 1)
+ if (idx < 0) 0 else delta(idx)
+ }
+
+ /** The address to which `x` is translated under compression */
+ def adjusted(x: Addr): Addr = x - deltaAt(x)
+
+ /** Compute all shift-deltas */
+ private def computeDeltas() = {
+ delta = new Array[Int](numOffsets)
+ var lastDelta = 0
+ var i = 0
+ while (i < numOffsets) {
+ val off = offset(i)
+ val skippedOff = skipZeroes(off)
+ val skippedCount = skippedOff.index - off.index
+ assert(skippedCount < AddrWidth, s"unset field at position $off")
+ lastDelta += skippedCount
+ delta(i) = lastDelta
+ i += 1
+ }
+ }
+
+ /** The absolute or relative adjusted address at index `i` of `offsets` array*/
+ private def adjustedOffset(i: Int): Addr = {
+ val at = offset(i)
+ val original = getAddr(at)
+ if (isRelative(i)) {
+ // Relative fields hold lengths; recompute via two routes and check they agree.
+ val start = skipNat(at)
+ val len1 = original + delta(i) - deltaAt(original + start.index)
+ val len2 = adjusted(original + start.index) - adjusted(start).index
+ assert(len1 == len2,
+ s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2")
+ len1
+ } else adjusted(original)
+ }
+
+ /** Adjust all offsets according to previously computed deltas */
+ private def adjustOffsets(): Unit = {
+ for (i <- 0 until numOffsets) {
+ val corrected = adjustedOffset(i)
+ fillAddr(offset(i), corrected)
+ }
+ }
+
+ /** Adjust deltas to also take account references that will shrink (and thereby
+ * generate additional zeroes that can be skipped) due to previously
+ * computed adjustments.
+ */
+ private def adjustDeltas(): Int = {
+ val delta1 = new Array[Int](delta.length)
+ var lastDelta = 0
+ var i = 0
+ while (i < numOffsets) {
+ val corrected = adjustedOffset(i)
+ lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index)
+ delta1(i) = lastDelta
+ i += 1
+ }
+ // Extra bytes saved by this round compared to the previous deltas.
+ val saved =
+ if (numOffsets == 0) 0
+ else delta1(numOffsets - 1) - delta(numOffsets - 1)
+ delta = delta1
+ saved
+ }
+
+ /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */
+ private def compress(): Int = {
+ var lastDelta = 0
+ var start = 0
+ var i = 0
+ var wasted = 0
+ def shift(end: Int) =
+ Array.copy(bytes, start, bytes, start - lastDelta, end - start)
+ while (i < numOffsets) {
+ val next = offsets(i)
+ shift(next)
+ start = next + delta(i) - lastDelta
+ val pastZeroes = skipZeroes(Addr(next)).index
+ assert(pastZeroes >= start, s"something's wrong: eliminated non-zero")
+ wasted += (pastZeroes - start)
+ lastDelta = delta(i)
+ i += 1
+ }
+ shift(length)
+ length -= lastDelta
+ wasted
+ }
+
+ // Remap every recorded tree address to its post-compression location.
+ def adjustPickledTrees(): Unit = {
+ val it = pickledTrees.keySet.iterator
+ while (it.hasNext) {
+ val tree = it.next
+ pickledTrees.put(tree, adjusted(pickledTrees.get(tree).asInstanceOf[Addr]))
+ }
+ }
+
+ /** Final assembly, involving the following steps:
+ * - compute deltas
+ * - adjust deltas until additional savings are < 1% of total
+ * - adjust offsets according to the adjusted deltas
+ * - shrink buffer, skipping zeroes.
+ */
+ def compactify(): Unit = {
+ val origLength = length
+ computeDeltas()
+ //println(s"offsets: ${offsets.take(numOffsets).deep}")
+ //println(s"deltas: ${delta.take(numOffsets).deep}")
+ var saved = 0
+ do {
+ saved = adjustDeltas()
+ pickling.println(s"adjusting deltas, saved = $saved")
+ } while (saved > 0 && length / saved < 100)
+ adjustOffsets()
+ adjustPickledTrees()
+ val wasted = compress()
+ pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now.
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala
new file mode 100644
index 000000000..365b5d268
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -0,0 +1,551 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast.Trees._
+import TastyFormat._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._
+import collection.mutable
+import NameOps._
+import TastyBuffer._
+
+class TreePickler(pickler: TastyPickler) {
+ val buf = new TreeBuffer
+ pickler.newSection("ASTs", buf)
+ import buf._
+ import pickler.nameBuffer.{nameIndex, fullNameIndex}
+ import ast.tpd._
+
+ private val symRefs = new mutable.HashMap[Symbol, Addr]
+ private val forwardSymRefs = new mutable.HashMap[Symbol, List[Addr]]
+ private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null
+
+ private def withLength(op: => Unit) = {
+ val lengthAddr = reserveRef(relative = true)
+ op
+ fillRef(lengthAddr, currentAddr, relative = true)
+ }
+
+ def addrOfSym(sym: Symbol): Option[Addr] = {
+ symRefs.get(sym)
+ }
+
+ def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case tree: MemberDef =>
+ if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr
+ case _ =>
+ }
+
+ def registerDef(sym: Symbol): Unit = {
+ symRefs(sym) = currentAddr
+ forwardSymRefs.get(sym) match {
+ case Some(refs) =>
+ refs.foreach(fillRef(_, currentAddr, relative = false))
+ forwardSymRefs -= sym
+ case None =>
+ }
+ }
+
+ private def pickleName(name: Name): Unit = writeNat(nameIndex(name).index)
+ private def pickleName(name: TastyName): Unit = writeNat(nameIndex(name).index)
+ private def pickleNameAndSig(name: Name, sig: Signature) = {
+ val Signature(params, result) = sig
+ pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result)))
+ }
+
+ private def pickleName(sym: Symbol)(implicit ctx: Context): Unit =
+ if (sym is Flags.ExpandedName)
+ pickleName(TastyName.Expanded(
+ nameIndex(sym.name.expandedPrefix), nameIndex(sym.name.unexpandedName)))
+ else pickleName(sym.name)
+
+ private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+ case Some(label) =>
+ if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
+ case None =>
+ ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
+ pickleForwardSymRef(sym)
+ }
+
+ private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = {
+ val ref = reserveRef(relative = false)
+ assert(!sym.is(Flags.Package), sym)
+ forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
+ }
+
+ private def isLocallyDefined(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+ case Some(label) => assert(sym.exists); label != NoAddr
+ case None => false
+ }
+
+ def pickle(trees: List[Tree])(implicit ctx: Context) = {
+
+ def qualifiedName(sym: Symbol): TastyName =
+ if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName)
+ else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name))
+
+ def pickleConstant(c: Constant): Unit = c.tag match {
+ case UnitTag =>
+ writeByte(UNITconst)
+ case BooleanTag =>
+ writeByte(if (c.booleanValue) TRUEconst else FALSEconst)
+ case ByteTag =>
+ writeByte(BYTEconst)
+ writeInt(c.byteValue)
+ case ShortTag =>
+ writeByte(SHORTconst)
+ writeInt(c.shortValue)
+ case CharTag =>
+ writeByte(CHARconst)
+ writeNat(c.charValue)
+ case IntTag =>
+ writeByte(INTconst)
+ writeInt(c.intValue)
+ case LongTag =>
+ writeByte(LONGconst)
+ writeLongInt(c.longValue)
+ case FloatTag =>
+ writeByte(FLOATconst)
+ writeInt(java.lang.Float.floatToRawIntBits(c.floatValue))
+ case DoubleTag =>
+ writeByte(DOUBLEconst)
+ writeLongInt(java.lang.Double.doubleToRawLongBits(c.doubleValue))
+ case StringTag =>
+ writeByte(STRINGconst)
+ writeNat(nameIndex(c.stringValue).index)
+ case NullTag =>
+ writeByte(NULLconst)
+ case ClazzTag =>
+ writeByte(CLASSconst)
+ pickleType(c.typeValue)
+ case EnumTag =>
+ writeByte(ENUMconst)
+ pickleType(c.symbolValue.termRef)
+ }
+
+ def pickleType(tpe0: Type, richTypes: Boolean = false): Unit = try {
+ val tpe = tpe0.stripTypeVar
+ val prev = pickledTypes.get(tpe)
+ if (prev == null) {
+ pickledTypes.put(tpe, currentAddr)
+ pickleNewType(tpe, richTypes)
+ }
+ else {
+ writeByte(SHARED)
+ writeRef(prev.asInstanceOf[Addr])
+ }
+ } catch {
+ case ex: AssertionError =>
+ println(i"error when pickling type $tpe0")
+ throw ex
+ }
+
+ def pickleNewType(tpe: Type, richTypes: Boolean): Unit = try { tpe match {
+ case ConstantType(value) =>
+ pickleConstant(value)
+ case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
+ pickleType(tpe.info.bounds.hi)
+ case tpe: WithFixedSym =>
+ val sym = tpe.symbol
+ if (sym.is(Flags.Package)) {
+ writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
+ pickleName(qualifiedName(sym))
+ }
+ else {
+ assert(tpe.prefix == NoPrefix)
+ def pickleRef() = {
+ writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
+ pickleSymRef(sym)
+ }
+ if (sym is Flags.BindDefinedType) {
+ registerDef(sym)
+ writeByte(BIND)
+ withLength {
+ pickleName(sym.name)
+ pickleType(sym.info)
+ pickleRef()
+ }
+ }
+ else pickleRef()
+ }
+ case tpe: TermRefWithSignature =>
+ if (tpe.symbol.is(Flags.Package)) picklePackageRef(tpe.symbol)
+ else {
+ writeByte(TERMREF)
+ pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix)
+ }
+ case tpe: NamedType =>
+ if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda)
+ // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will
+ // be reconstituted when unpickling.
+ pickleType(tpe.prefix)
+ else if (isLocallyDefined(tpe.symbol)) {
+ writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol)
+ pickleSymRef(tpe.symbol); pickleType(tpe.prefix)
+ }
+ else {
+ writeByte(if (tpe.isType) TYPEREF else TERMREF)
+ pickleName(tpe.name); pickleType(tpe.prefix)
+ }
+ case tpe: ThisType =>
+ if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot)
+ picklePackageRef(tpe.cls)
+ else {
+ writeByte(THIS)
+ pickleType(tpe.tref)
+ }
+ case tpe: SuperType =>
+ writeByte(SUPERtype)
+ withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)}
+ case tpe: SkolemType =>
+ writeByte(SKOLEMtype)
+ writeRef(pickledTypes.get(tpe.binder).asInstanceOf[Addr])
+ case tpe: RefinedType =>
+ val args = tpe.argInfos(interpolate = false)
+ if (args.isEmpty) {
+ writeByte(REFINEDtype)
+ withLength {
+ pickleType(tpe.parent)
+ pickleName(tpe.refinedName)
+ pickleType(tpe.refinedInfo, richTypes = true)
+ }
+ }
+ else {
+ writeByte(APPLIEDtype)
+ withLength { pickleType(tpe.withoutArgs(args)); args.foreach(pickleType(_)) }
+ }
+ case tpe: TypeAlias =>
+ writeByte(TYPEALIAS)
+ withLength {
+ pickleType(tpe.alias, richTypes)
+ tpe.variance match {
+ case 1 => writeByte(COVARIANT)
+ case -1 => writeByte(CONTRAVARIANT)
+ case 0 =>
+ }
+ }
+ case tpe: TypeBounds =>
+ writeByte(TYPEBOUNDS)
+ withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) }
+ case tpe: AnnotatedType =>
+ writeByte(ANNOTATED)
+ withLength { pickleTree(tpe.annot.tree); pickleType(tpe.tpe, richTypes) }
+ case tpe: AndOrType =>
+ writeByte(if (tpe.isAnd) ANDtype else ORtype)
+ withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) }
+ case tpe: ExprType =>
+ writeByte(BYNAMEtype)
+ pickleType(tpe.underlying)
+ case tpe: MethodType if richTypes =>
+ writeByte(METHODtype)
+ pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes)
+ case tpe: PolyType if richTypes =>
+ writeByte(POLYtype)
+ pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramBounds)
+ case tpe: PolyParam =>
+ if (!pickleParamType(tpe))
+ // TODO figure out why this case arises in e.g. pickling AbstractFileReader.
+ ctx.typerState.constraint.entry(tpe) match {
+ case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes)
+ case _ => assert(false, s"orphan poly parameter: $tpe")
+ }
+ case tpe: MethodParam =>
+ assert(pickleParamType(tpe), s"orphan method parameter: $tpe")
+ case tpe: LazyRef =>
+ pickleType(tpe.ref)
+ }} catch {
+ case ex: AssertionError =>
+ println(i"error while pickling type $tpe")
+ throw ex
+ }
+
+ def picklePackageRef(pkg: Symbol): Unit = {
+ writeByte(TERMREFpkg)
+ pickleName(qualifiedName(pkg))
+ }
+
+ def pickleMethodic(result: Type, names: List[Name], types: List[Type]) =
+ withLength {
+ pickleType(result, richTypes = true)
+ (names, types).zipped.foreach { (name, tpe) =>
+ pickleName(name); pickleType(tpe)
+ }
+ }
+
+ def pickleParamType(tpe: ParamType): Boolean = {
+ val binder = pickledTypes.get(tpe.binder)
+ val pickled = binder != null
+ if (pickled) {
+ writeByte(PARAMtype)
+ withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) }
+ }
+ pickled
+ }
+
+ def pickleTpt(tpt: Tree): Unit = pickleType(tpt.tpe) // TODO correlate with original when generating positions
+
+ def pickleTreeUnlessEmpty(tree: Tree): Unit =
+ if (!tree.isEmpty) pickleTree(tree)
+
+ def pickleTree(tree: Tree): Unit = try {
+ pickledTrees.put(tree, currentAddr)
+ tree match {
+ case Ident(name) =>
+ tree.tpe match {
+ case tp: TermRef => pickleType(tp)
+ case _ =>
+ writeByte(IDENT)
+ pickleName(name)
+ pickleType(tree.tpe)
+ }
+ case This(_) =>
+ pickleType(tree.tpe)
+ case Select(qual, name) =>
+ writeByte(SELECT)
+ val realName = tree.tpe match {
+ case tp: NamedType if tp.name.isShadowedName => tp.name
+ case _ => name
+ }
+ val sig = tree.tpe.signature
+ if (sig == Signature.NotAMethod) pickleName(realName)
+ else pickleNameAndSig(realName, sig)
+ pickleTree(qual)
+ case Apply(fun, args) =>
+ writeByte(APPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTree)
+ }
+ case TypeApply(fun, args) =>
+ writeByte(TYPEAPPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTpt)
+ }
+ case Literal(const1) =>
+ pickleConstant {
+ tree.tpe match {
+ case ConstantType(const2) => const2
+ case _ => const1
+ }
+ }
+ case Super(qual, mix) =>
+ writeByte(SUPER)
+ withLength {
+ pickleTree(qual);
+ if (!mix.isEmpty) {
+ val SuperType(_, mixinType) = tree.tpe
+ pickleType(mixinType)
+ }
+ }
+ case New(tpt) =>
+ writeByte(NEW)
+ pickleTpt(tpt)
+ case Pair(left, right) =>
+ writeByte(PAIR)
+ withLength { pickleTree(left); pickleTree(right) }
+ case Typed(expr, tpt) =>
+ writeByte(TYPED)
+ withLength { pickleTree(expr); pickleTpt(tpt) }
+ case NamedArg(name, arg) =>
+ writeByte(NAMEDARG)
+ withLength { pickleName(name); pickleTree(arg) }
+ case Assign(lhs, rhs) =>
+ writeByte(ASSIGN)
+ withLength { pickleTree(lhs); pickleTree(rhs) }
+ case Block(stats, expr) =>
+ writeByte(BLOCK)
+ stats.foreach(preRegister)
+ withLength { pickleTree(expr); stats.foreach(pickleTree) }
+ case If(cond, thenp, elsep) =>
+ writeByte(IF)
+ withLength{ pickleTree(cond); pickleTree(thenp); pickleTree(elsep) }
+ case Closure(env, meth, tpt) =>
+ writeByte(LAMBDA)
+ assert(env.isEmpty)
+ withLength{
+ pickleTree(meth)
+ if (tpt.tpe.exists) pickleTpt(tpt)
+ }
+ case Match(selector, cases) =>
+ writeByte(MATCH)
+ withLength { pickleTree(selector); cases.foreach(pickleTree) }
+ case CaseDef(pat, guard, rhs) =>
+ writeByte(CASEDEF)
+ withLength { pickleTree(pat); pickleTree(rhs); pickleTreeUnlessEmpty(guard) }
+ case Return(expr, from) =>
+ writeByte(RETURN)
+ withLength { pickleSymRef(from.symbol); pickleTreeUnlessEmpty(expr) }
+ case Try(block, cases, finalizer) =>
+ writeByte(TRY)
+ withLength { pickleTree(block); cases.foreach(pickleTree); pickleTreeUnlessEmpty(finalizer) }
+ case SeqLiteral(elems) =>
+ writeByte(REPEATED)
+ withLength { elems.foreach(pickleTree) }
+ case TypeTree(original) =>
+ pickleTpt(tree)
+ case Bind(name, body) =>
+ registerDef(tree.symbol)
+ writeByte(BIND)
+ withLength { pickleName(name); pickleType(tree.symbol.info); pickleTree(body) }
+ case Alternative(alts) =>
+ writeByte(ALTERNATIVE)
+ withLength { alts.foreach(pickleTree) }
+ case UnApply(fun, implicits, patterns) =>
+ writeByte(UNAPPLY)
+ withLength {
+ pickleTree(fun)
+ for (implicitArg <- implicits) {
+ writeByte(IMPLICITarg)
+ pickleTree(implicitArg)
+ }
+ pickleType(tree.tpe)
+ patterns.foreach(pickleTree)
+ }
+ case tree: ValDef =>
+ pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs)
+ case tree: DefDef =>
+ def pickleAllParams = {
+ pickleParams(tree.tparams)
+ for (vparams <- tree.vparamss) {
+ writeByte(PARAMS)
+ withLength { pickleParams(vparams) }
+ }
+ }
+ pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams)
+ case tree: TypeDef =>
+ pickleDef(TYPEDEF, tree.symbol, tree.rhs)
+ case tree: Template =>
+ registerDef(tree.symbol)
+ writeByte(TEMPLATE)
+ val (params, rest) = tree.body partition {
+ case stat: TypeDef => stat.symbol is Flags.Param
+ case stat: ValOrDefDef =>
+ stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter
+ case _ => false
+ }
+ withLength {
+ pickleParams(params)
+ tree.parents.foreach(pickleTree)
+ val cinfo @ ClassInfo(_, _, _, _, selfInfo) = tree.symbol.owner.info
+ if ((selfInfo ne NoType) || !tree.self.isEmpty) {
+ writeByte(SELFDEF)
+ pickleName(tree.self.name)
+ pickleType {
+ cinfo.selfInfo match {
+ case sym: Symbol => sym.info
+ case tp: Type => tp
+ }
+ }
+ }
+ pickleStats(tree.constr :: rest)
+ }
+ case Import(expr, selectors) =>
+ writeByte(IMPORT)
+ withLength {
+ pickleTree(expr)
+ selectors foreach {
+ case Pair(Ident(from), Ident(to)) =>
+ writeByte(RENAMED)
+ withLength { pickleName(from); pickleName(to) }
+ case Ident(name) =>
+ writeByte(IMPORTED)
+ pickleName(name)
+ }
+ }
+ case PackageDef(pid, stats) =>
+ writeByte(PACKAGE)
+ withLength { pickleType(pid.tpe); pickleStats(stats) }
+ }}
+ catch {
+ case ex: AssertionError =>
+ println(i"error when pickling tree $tree")
+ throw ex
+ }
+
+ /** Pickle one definition node.
+  *  Writes `tag`, then a length-prefixed section containing: the name of `sym`,
+  *  any parameters (via the by-name `pickleParams` argument), the type tree
+  *  `tpt` (pickled as a tpt when it is a TypeTree, as a general tree otherwise),
+  *  the right-hand side unless empty, and finally the modifiers of `sym`.
+  *  Precondition: `sym` has not yet been assigned an address (see assert).
+  */
+ def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ()) = {
+ assert(symRefs(sym) == NoAddr)
+ registerDef(sym)
+ writeByte(tag)
+ withLength {
+ pickleName(sym)
+ pickleParams
+ tpt match {
+ case tpt: TypeTree => pickleTpt(tpt)
+ case _ => pickleTree(tpt)
+ }
+ pickleTreeUnlessEmpty(rhs)
+ pickleModifiers(sym)
+ }
+ }
+
+ /** Pickle a single parameter definition: value params (ValDef), by-name /
+  *  alias params represented as DefDefs, and type params (TypeDef).
+  */
+ def pickleParam(tree: Tree): Unit = tree match {
+ case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt)
+ case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs)
+ case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
+ }
+
+ /** Pickle a parameter list. All params are pre-registered first so that
+  *  forward references between them resolve to addresses.
+  */
+ def pickleParams(trees: List[Tree]): Unit = {
+ trees.foreach(preRegister)
+ trees.foreach(pickleParam)
+ }
+
+ /** Pickle a statement sequence; statements are pre-registered first,
+  *  and empty statements are dropped.
+  */
+ def pickleStats(stats: List[Tree]) = {
+ stats.foreach(preRegister)
+ stats.foreach(stat => if (!stat.isEmpty) pickleTree(stat))
+ }
+
+ /** Pickle the modifiers of `sym` as a sequence of modifier tags, followed by
+  *  its annotations. A qualified private/protected writes the qualifier type;
+  *  note that unqualified PROTECTED is suppressed when a qualifier was written.
+  *  Term and type symbols have disjoint sets of additional modifier tags.
+  */
+ def pickleModifiers(sym: Symbol): Unit = {
+ import Flags._
+ val flags = sym.flags
+ val privateWithin = sym.privateWithin
+ if (privateWithin.exists) {
+ writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified)
+ pickleType(privateWithin.typeRef)
+ }
+ if (flags is Private) writeByte(PRIVATE)
+ if (flags is Protected) if (!privateWithin.exists) writeByte(PROTECTED)
+ // FINAL is implied for modules, so it is not written for them
+ if ((flags is Final) && !(sym is Module)) writeByte(FINAL)
+ if (flags is Case) writeByte(CASE)
+ if (flags is Override) writeByte(OVERRIDE)
+ if (flags is Inline) writeByte(INLINE)
+ if (flags is JavaStatic) writeByte(STATIC)
+ if (flags is Module) writeByte(OBJECT)
+ if (flags is Local) writeByte(LOCAL)
+ if (flags is Synthetic) writeByte(SYNTHETIC)
+ if (flags is Artifact) writeByte(ARTIFACT)
+ if (flags is Scala2x) writeByte(SCALA2X)
+ if (flags is InSuperCall) writeByte(INSUPERCALL)
+ if (sym.isTerm) {
+ if (flags is Implicit) writeByte(IMPLICIT)
+ // LAZY is implied for modules, so it is not written for them
+ if ((flags is Lazy) && !(sym is Module)) writeByte(LAZY)
+ if (flags is AbsOverride) writeByte(ABSOVERRIDE)
+ if (flags is Mutable) writeByte(MUTABLE)
+ if (flags is Accessor) writeByte(FIELDaccessor)
+ if (flags is CaseAccessor) writeByte(CASEaccessor)
+ if (flags is DefaultParameterized) writeByte(DEFAULTparameterized)
+ } else {
+ if (flags is Sealed) writeByte(SEALED)
+ if (flags is Abstract) writeByte(ABSTRACT)
+ if (flags is Trait) writeByte(TRAIT)
+ if (flags is Covariant) writeByte(COVARIANT)
+ if (flags is Contravariant) writeByte(CONTRAVARIANT)
+ }
+ sym.annotations.foreach(pickleAnnotation)
+ }
+
+ /** Pickle one annotation: its class type followed by the annotation tree. */
+ def pickleAnnotation(ann: Annotation) = {
+ writeByte(ANNOTATION)
+ withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
+ }
+
+ /** Adjust every address stored in `mp` after buffer compactification.
+  *  Keys are copied to a buffer first because the map is mutated in the loop.
+  */
+ def updateMapWithDeltas[T](mp: collection.mutable.Map[T, Addr]) =
+ for (key <- mp.keysIterator.toBuffer[T]) mp(key) = adjusted(mp(key))
+
+ trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree))
+ assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, %")
+ compactify()
+ updateMapWithDeltas(symRefs)
+ }
+}
diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
new file mode 100644
index 000000000..9d2ac2f23
--- /dev/null
+++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -0,0 +1,889 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
+import StdNames._, Denotations._, Flags._, Constants._, Annotations._
+import util.Positions._
+import dotty.tools.dotc.ast.{tpd, Trees, untpd}
+import Trees._
+import Decorators._
+import TastyUnpickler._, TastyBuffer._, PositionPickler._
+import annotation.switch
+import scala.collection.{ mutable, immutable }
+import typer.Mode
+import config.Printers.pickling
+
+/** Unpickler for typed trees
+ * @param reader the reader from which to unpickle
+ * @param tastyName the nametable
+ */
+class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
+ import TastyFormat._
+ import TastyName._
+ import tpd._
+
+ private var readPositions = false
+ private var totalRange = NoPosition
+ private var positions: collection.Map[Addr, Position] = _
+
+ /** Make a subsequent call to `unpickle` return trees with positions
+  *  @param totalRange the range position enclosing all returned trees,
+  *                    or NoPosition if positions should not be unpickled
+  *  @param positions  a map from tree addresses to their positions relative
+  *                    to positions of parent nodes.
+  *  Must be called before `unpickle` for positions to take effect.
+  */
+ def usePositions(totalRange: Position, positions: collection.Map[Addr, Position]): Unit = {
+ readPositions = true
+ this.totalRange = totalRange
+ this.positions = positions
+ }
+
+ private val symAtAddr = new mutable.HashMap[Addr, Symbol]
+ private val treeAtAddr = new mutable.HashMap[Addr, Tree]
+ private val typeAtAddr = new mutable.HashMap[Addr, Type] // currently populated only for types that are known to be SHAREd.
+ private var stubs: Set[Symbol] = Set()
+
+ private var roots: Set[SymDenotation] = null
+
+ /** Enter all toplevel classes and objects into their scopes
+  *  @param roots a set of SymDenotations that should be overwritten by unpickling
+  *  Uses a forked reader so the main reader's position is left untouched.
+  */
+ def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = {
+ this.roots = roots
+ new TreeReader(reader).fork.indexStats(reader.endAddr)
+ }
+
+ /** The unpickled trees. Requires a prior call to `enterTopLevel`;
+  *  positions are normalized afterwards relative to `totalRange`
+  *  (a no-op unless `usePositions` was called).
+  */
+ def unpickle()(implicit ctx: Context): List[Tree] = {
+ assert(roots != null, "unpickle without previous enterTopLevel")
+ val stats = new TreeReader(reader)
+ .readIndexedStats(NoSymbol, reader.endAddr)(ctx.addMode(Mode.AllowDependentFunctions))
+ normalizePos(stats, totalRange)
+ stats
+ }
+
+ /** Translate a TastyName from the name table into a compiler TermName.
+  *  NOTE(review): SuperAccessor and DefaultGetter are unimplemented (`???`)
+  *  and will throw NotImplementedError if encountered.
+  */
+ def toTermName(tname: TastyName): TermName = tname match {
+ case Simple(name) => name
+ case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
+ case Signed(original, params, result) => toTermName(original)
+ case Shadowed(original) => toTermName(original).shadowedName
+ case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
+ case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
+ case SuperAccessor(accessed) => ???
+ case DefaultGetter(meth, num) => ???
+ }
+
+ // Convenience overloads resolving a NameRef through the name table.
+ def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref))
+ def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName
+
+ /** Lazy type that completes a symbol by reading its (indexed) definition
+  *  at the completer's reader position; the resulting tree is cached in
+  *  `treeAtAddr` so a later tree read can pick it up without re-reading.
+  */
+ class Completer(reader: TastyReader) extends LazyType {
+ import reader._
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ treeAtAddr(currentAddr) = new TreeReader(reader).readIndexedDef()
+ }
+ }
+
+ class TreeReader(val reader: TastyReader) {
+ import reader._
+
+ // Independent readers over the same bytes: `forkAt` from a given address,
+ // `fork` from the current one. Forks do not advance this reader.
+ def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
+ def fork = forkAt(currentAddr)
+
+ /** Skip over one tree without building it, dispatching on the tag's
+  *  category: length-prefixed trees jump to their end address; Nat+AST
+  *  trees skip the Nat then one subtree; AST trees skip one subtree;
+  *  Nat trees skip just the Nat.
+  */
+ def skipTree(tag: Int): Unit =
+ if (tag >= firstLengthTreeTag) goto(readEnd())
+ else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
+ else if (tag >= firstASTTreeTag) skipTree()
+ else if (tag >= firstNatTreeTag) readNat()
+ def skipTree(): Unit = skipTree(readByte())
+
+ /** Skip over all leading value-parameter sections and type parameters. */
+ def skipParams(): Unit =
+ while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
+
+ /** The next tag, following through SHARED tags: a SHARED node is
+  *  dereferenced (via a fork, without advancing this reader) until a
+  *  non-SHARED tag is found.
+  */
+ def nextUnsharedTag: Int = {
+ val tag = nextByte
+ if (tag == SHARED) {
+ val lookAhead = fork
+ lookAhead.reader.readByte()
+ forkAt(lookAhead.reader.readAddr()).nextUnsharedTag
+ }
+ else tag
+ }
+
+ def readName(): TermName = toTermName(readNameRef())
+
+ /** Read a name reference; for a Signed name return the pair of the
+  *  underlying term name and its Signature, otherwise just the term name.
+  *  The untyped `Any` return stands for `TermName | (TermName, Signature)`.
+  */
+ def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
+ tastyName(readNameRef()) match {
+ case Signed(original, params, result) =>
+ var sig = Signature(params map toTypeName, toTypeName(result))
+ // NOTE(review): this line is a no-op (assigns sig to itself) — confirm intent
+ if (sig == Signature.NotAMethod) sig = Signature.NotAMethod
+ (toTermName(original), sig)
+ case name =>
+ toTermName(name)
+ }
+
+// ------ Reading types -----------------------------------------------------
+
+ /** Read names in an interleaved sequence of (parameter) names and types/bounds,
+  *  skipping over each type/bound; the cast to `N` is unchecked (erased).
+  */
+ def readParamNames[N <: Name](end: Addr): List[N] =
+ until(end) {
+ val name = readName().asInstanceOf[N]
+ skipTree()
+ name
+ }
+
+ /** Read types or bounds in an interleaved sequence of (parameter) names and
+  *  types/bounds, skipping over each name (a Nat-encoded name ref).
+  */
+ def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
+ until(end) { readNat(); readType().asInstanceOf[T] }
+
+ /** Read a reference to a definition and return the symbol created at that
+  *  definition. If the symbol does not exist yet (forward reference), a stub
+  *  is created by forking to the target address; its owner may be wrong and
+  *  is overwritten later when the real definition is read.
+  */
+ def readSymRef()(implicit ctx: Context): Symbol = {
+ val start = currentAddr
+ val addr = readAddr()
+ symAtAddr get addr match {
+ case Some(sym) => sym
+ case None =>
+ // Create a stub; owner might be wrong but will be overwritten later.
+ forkAt(addr).createSymbol()
+ val sym = symAtAddr(addr)
+ ctx.log(i"forward reference to $sym")
+ stubs += sym
+ sym
+ }
+ }
+
+ /** Read a type. Dispatches on the tag: tags below `firstLengthTreeTag`
+  *  are simple (fixed-arity) types, the rest are length-prefixed.
+  *  Each length-prefixed read asserts it consumed exactly up to `end`.
+  */
+ def readType()(implicit ctx: Context): Type = {
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading type ${astTagToString(tag)} at $start")
+
+ // Record `tp` at this node's address before running `op`, so that
+ // recursive occurrences (e.g. refinement self-references) resolve.
+ def registeringType[T](tp: Type, op: => T): T = {
+ typeAtAddr(start) = tp
+ op
+ }
+
+ def readLengthType(): Type = {
+ val end = readEnd()
+
+ // For POLYtype/METHODtype: names and param types are interleaved with the
+ // result type first; read names with one fork, keep a second fork
+ // positioned at the params for a later readParamTypes pass.
+ def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
+ val nameReader = fork
+ nameReader.skipTree() // skip result
+ val paramReader = nameReader.fork
+ (nameReader.readParamNames[N](end), paramReader)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPERtype =>
+ SuperType(readType(), readType())
+ case REFINEDtype =>
+ val parent = readType()
+ var name: Name = readName()
+ val ttag = nextUnsharedTag
+ if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
+ RefinedType(parent, name, rt => registeringType(rt, readType()))
+ // Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
+ // Eta expansion of the latter puts readType() out of the expression.
+ case APPLIEDtype =>
+ readType().appliedTo(until(end)(readType()))
+ case TYPEBOUNDS =>
+ TypeBounds(readType(), readType())
+ case TYPEALIAS =>
+ val alias = readType()
+ // An optional trailing variance tag refines the alias's variance.
+ val variance =
+ if (nextByte == COVARIANT) { readByte(); 1 }
+ else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
+ else 0
+ TypeAlias(alias, variance)
+ case ANNOTATED =>
+ AnnotatedType(Annotation(readTerm()), readType())
+ case ANDtype =>
+ AndType(readType(), readType())
+ case ORtype =>
+ OrType(readType(), readType())
+ case BIND =>
+ val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
+ symAtAddr(start) = sym
+ TypeRef.withFixedSym(NoPrefix, sym.name, sym)
+ case POLYtype =>
+ val (names, paramReader) = readNamesSkipParams[TypeName]
+ val result = PolyType(names)(
+ pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
+ pt => readType())
+ goto(end)
+ result
+ case METHODtype =>
+ val (names, paramReader) = readNamesSkipParams[TermName]
+ val result = MethodType(names, paramReader.readParamTypes[Type](end))(
+ mt => registeringType(mt, readType()))
+ goto(end)
+ result
+ case PARAMtype =>
+ // The binder must have been registered already (a SHARED back-reference).
+ readTypeRef() match {
+ case binder: PolyType => PolyParam(binder, readNat())
+ case binder: MethodType => MethodParam(binder, readNat())
+ }
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case ENUMconst =>
+ ConstantType(Constant(readTermRef().termSymbol))
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ def readSimpleType(): Type = (tag: @switch) match {
+ case TYPEREFdirect | TERMREFdirect =>
+ NamedType.withFixedSym(NoPrefix, readSymRef())
+ case TYPEREFsymbol | TERMREFsymbol =>
+ readSymNameRef()
+ case TYPEREFpkg =>
+ readPackageRef().moduleClass.typeRef
+ case TERMREFpkg =>
+ readPackageRef().termRef
+ case TYPEREF =>
+ val name = readName().toTypeName
+ TypeRef(readType(), name)
+ case TERMREF =>
+ readNameSplitSig() match {
+ case name: TermName => TermRef.all(readType(), name)
+ case (name: TermName, sig: Signature) => TermRef.withSig(readType(), name, sig)
+ }
+ case THIS =>
+ ThisType.raw(readType().asInstanceOf[TypeRef])
+ case SKOLEMtype =>
+ SkolemType(readTypeRef())
+ case SHARED =>
+ // Shared types are cached by address so each is materialized once.
+ val ref = readAddr()
+ typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
+ case UNITconst =>
+ ConstantType(Constant(()))
+ case TRUEconst =>
+ ConstantType(Constant(true))
+ case FALSEconst =>
+ ConstantType(Constant(false))
+ case BYTEconst =>
+ ConstantType(Constant(readInt().toByte))
+ case SHORTconst =>
+ ConstantType(Constant(readInt().toShort))
+ case CHARconst =>
+ ConstantType(Constant(readNat().toChar))
+ case INTconst =>
+ ConstantType(Constant(readInt()))
+ case LONGconst =>
+ ConstantType(Constant(readLongInt()))
+ case FLOATconst =>
+ ConstantType(Constant(java.lang.Float.intBitsToFloat(readInt())))
+ case DOUBLEconst =>
+ ConstantType(Constant(java.lang.Double.longBitsToDouble(readLongInt())))
+ case STRINGconst =>
+ ConstantType(Constant(readName().toString))
+ case NULLconst =>
+ ConstantType(Constant(null))
+ case BYNAMEtype =>
+ ExprType(readType())
+ }
+
+ if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
+ }
+
+ /** Read a symbol reference followed by its prefix type and build the
+  *  corresponding NamedType.
+  */
+ private def readSymNameRef()(implicit ctx: Context): Type = {
+ val sym = readSymRef()
+ val prefix = readType()
+ val res = NamedType.withSymAndName(prefix, sym, sym.name)
+ prefix match {
+ case prefix: ThisType if prefix.cls eq sym.owner => res.withDenot(sym.denot)
+ // without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
+ // the problem arises when a self type of a trait is a type parameter of the same trait.
+ case _ => res
+ }
+ }
+
+ /** Read a package name; the root and empty packages are special-cased,
+  *  all others are looked up (and required to exist) in the context.
+  */
+ private def readPackageRef()(implicit ctx: Context): TermSymbol = {
+ val name = readName()
+ if (name == nme.ROOT) defn.RootPackage
+ else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
+ else ctx.requiredPackage(name)
+ }
+
+ /** Read a back-reference to an already-registered type (fails if absent). */
+ def readTypeRef(): Type =
+ typeAtAddr(readAddr())
+
+ /** Read a type that must be a path (a SingletonType). */
+ def readPath()(implicit ctx: Context): Type = {
+ val tp = readType()
+ assert(tp.isInstanceOf[SingletonType])
+ tp
+ }
+
+ /** Read a type expected to be a TermRef (unchecked cast). */
+ def readTermRef()(implicit ctx: Context): TermRef =
+ readType().asInstanceOf[TermRef]
+
+// ------ Reading definitions -----------------------------------------------------
+
+ /** True if the current definition has no right-hand side: either the
+  *  section ends here, or the next byte already starts the modifiers.
+  */
+ private def noRhs(end: Addr): Boolean =
+ currentAddr == end || isModifierTag(nextByte)
+
+ /** A fresh context owned by `owner`; class owners reuse their (unforced)
+  *  declaration scope, other owners get a new scope.
+  */
+ private def localContext(owner: Symbol)(implicit ctx: Context) = {
+ val lctx = ctx.fresh.setOwner(owner)
+ if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
+ }
+
+ /** Adjust the pickled `givenFlags` with flags implied by the definition's
+  *  shape: Deferred for definitions lacking a body, Method for DEFDEFs,
+  *  module-creation flags for modules, and Param/ParamAccessor depending
+  *  on whether the owner is a class.
+  */
+ private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbstractType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
+ val lacksDefinition =
+ rhsIsEmpty &&
+ name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
+ isAbstractType
+ var flags = givenFlags
+ if (lacksDefinition && tag != PARAM) flags |= Deferred
+ if (tag == DEFDEF) flags |= Method
+ if (givenFlags is Module)
+ flags = flags | (if (tag == VALDEF) ModuleCreationFlags else ModuleClassCreationFlags)
+ if (ctx.owner.isClass) {
+ if (tag == TYPEPARAM) flags |= Param
+ else if (tag == PARAM) flags |= ParamAccessor
+ }
+ else if (isParamTag(tag)) flags |= Param
+ flags
+ }
+
+ /** Create symbol of definition node and enter in symAtAddr map.
+  *  Reads the header of the definition at the current address without
+  *  building trees: name, a peek at the tpt tag to classify the definition
+  *  (abstract type / class), then modifiers; the body is completed lazily
+  *  via a Completer over a sub-reader. Pre-existing stubs for this address
+  *  are reused; `roots` denotations are overwritten in place.
+  *  @return true iff the definition does not contain initialization code
+  */
+ def createSymbol()(implicit ctx: Context): Boolean = {
+ val start = currentAddr
+ val tag = readByte()
+ val end = readEnd()
+ val rawName = tastyName(readNameRef())
+ var name: Name = toTermName(rawName)
+ if (tag == TYPEDEF || tag == TYPEPARAM) name = name.toTypeName
+ skipParams()
+ val ttag = nextUnsharedTag
+ val isAbstractType = ttag == TYPEBOUNDS
+ val isClass = ttag == TEMPLATE
+ val templateStart = currentAddr
+ skipTree() // tpt
+ val rhsIsEmpty = noRhs(end)
+ if (!rhsIsEmpty) skipTree()
+ val (givenFlags, annots, privateWithin) = readModifiers(end)
+ val expandedFlag = if (rawName.isInstanceOf[TastyName.Expanded]) ExpandedName else EmptyFlags
+ pickling.println(i"creating symbol $name at $start with flags $givenFlags")
+ val flags = normalizeFlags(tag, givenFlags | expandedFlag, name, isAbstractType, rhsIsEmpty)
+ def adjustIfModule(completer: LazyType) =
+ if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
+ val sym =
+ roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
+ case Some(rootd) =>
+ pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}")
+ rootd.info = adjustIfModule(
+ new Completer(subReader(start, end)) with SymbolLoaders.SecondCompleter)
+ rootd.flags = flags &~ Touched // allow one more completion
+ rootd.privateWithin = privateWithin
+ rootd.symbol
+ case _ =>
+ val completer = adjustIfModule(new Completer(subReader(start, end)))
+ if (isClass)
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
+ privateWithin, coord = start.index)
+ else {
+ // Reuse a forward-reference stub for this address if one exists.
+ val sym = symAtAddr.get(start) match {
+ case Some(preExisting) =>
+ assert(stubs contains preExisting)
+ stubs -= preExisting
+ preExisting
+ case none =>
+ ctx.newNakedSymbol(start.index)
+ }
+ val denot = ctx.SymDenotation(symbol = sym, owner = ctx.owner, name, flags, completer, privateWithin)
+ sym.denot = denot
+ sym
+ }
+ } // TODO set position
+ sym.annotations = annots
+ ctx.enter(sym)
+ symAtAddr(start) = sym
+ if (isClass) {
+ // Index template params eagerly so member creation sees them in scope.
+ sym.completer.withDecls(newScope)
+ forkAt(templateStart).indexTemplateParams()(localContext(sym))
+ }
+ tag != VALDEF || rhsIsEmpty
+ }
+
+ /** Read modifier list into triplet of flags, annotations and a privateWithin
+  *  boundary symbol. Annotation trees are read lazily (deferred) so that
+  *  reading modifiers does not force term unpickling.
+  */
+ def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
+ var flags: FlagSet = EmptyFlags
+ var annots = new mutable.ListBuffer[Annotation]
+ var privateWithin: Symbol = NoSymbol
+ while (currentAddr.index != end.index) {
+ def addFlag(flag: FlagSet) = {
+ flags |= flag
+ readByte()
+ }
+ nextByte match {
+ case PRIVATE => addFlag(Private)
+ case INTERNAL => ??? // addFlag(Internal)
+ case PROTECTED => addFlag(Protected)
+ case ABSTRACT => addFlag(Abstract)
+ case FINAL => addFlag(Final)
+ case SEALED => addFlag(Sealed)
+ case CASE => addFlag(Case)
+ case IMPLICIT => addFlag(Implicit)
+ case LAZY => addFlag(Lazy)
+ case OVERRIDE => addFlag(Override)
+ case INLINE => addFlag(Inline)
+ case ABSOVERRIDE => addFlag(AbsOverride)
+ case STATIC => addFlag(JavaStatic)
+ case OBJECT => addFlag(Module)
+ case TRAIT => addFlag(Trait)
+ case LOCAL => addFlag(Local)
+ case SYNTHETIC => addFlag(Synthetic)
+ case ARTIFACT => addFlag(Artifact)
+ case MUTABLE => addFlag(Mutable)
+ case LABEL => addFlag(Label)
+ case FIELDaccessor => addFlag(Accessor)
+ case CASEaccessor => addFlag(CaseAccessor)
+ case COVARIANT => addFlag(Covariant)
+ case CONTRAVARIANT => addFlag(Contravariant)
+ case SCALA2X => addFlag(Scala2x)
+ case DEFAULTparameterized => addFlag(DefaultParameterized)
+ case INSUPERCALL => addFlag(InSuperCall)
+ case PRIVATEqualified =>
+ readByte()
+ privateWithin = readType().typeSymbol
+ case PROTECTEDqualified =>
+ addFlag(Protected)
+ privateWithin = readType().typeSymbol
+ case ANNOTATION =>
+ readByte()
+ val end = readEnd()
+ val sym = readType().typeSymbol
+ val lazyAnnotTree = readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+ annots += Annotation.deferred(sym, _ => lazyAnnotTree.complete)
+ case _ =>
+ assert(false, s"illegal modifier tag at $currentAddr")
+ }
+ }
+ (flags, annots.toList, privateWithin)
+ }
+
+ /** Create symbols for all definitions in the statement sequence between the
+  *  current address and `end`. Imports are skipped; packages recurse.
+  *  @return true iff none of the statements contains initialization code
+  */
+ def indexStats(end: Addr)(implicit ctx: Context): Boolean = {
+ val noInitss =
+ until(end) {
+ nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createSymbol()
+ case IMPORT =>
+ skipTree()
+ true
+ case PACKAGE =>
+ processPackage { (pid, end) => implicit ctx => indexStats(end) }
+ case _ =>
+ // Any other statement counts as initialization code.
+ skipTree()
+ false
+ }
+ }
+ noInitss.forall(_ == true)
+ }
+
+ /** Process package with given operation `op`. The operation takes as arguments
+  *  - a `RefTree` representing the `pid` of the package,
+  *  - an end address,
+  *  - a context which has the processed package as owner
+  */
+ def processPackage[T](op: (RefTree, Addr) => Context => T)(implicit ctx: Context): T = {
+ readByte()
+ val end = readEnd()
+ val pid = ref(readTermRef()).asInstanceOf[RefTree]
+ op(pid, end)(localContext(pid.symbol.moduleClass))
+ }
+
+ /** Create symbols for the longest consecutive sequence of parameters with
+  *  given `tag` starting at current address.
+  */
+ def indexParams(tag: Int)(implicit ctx: Context) =
+ while (nextByte == tag) createSymbol()
+
+ /** Create symbols for all type and value parameters of template starting
+  *  at current address (the TEMPLATE tag itself is consumed and asserted).
+  */
+ def indexTemplateParams()(implicit ctx: Context) = {
+ assert(readByte() == TEMPLATE)
+ readEnd()
+ indexParams(TYPEPARAM)
+ indexParams(PARAM)
+ }
+
+ /** If definition was already read by a completer, return the previously read
+  *  tree (removing it from the cache and skipping the bytes) or else read
+  *  the definition fresh.
+  */
+ def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
+ case Some(tree) => skipTree(); tree
+ case none => readNewDef()
+ }
+
+ /** Read the definition whose symbol was previously created at the current
+  *  address (by `createSymbol`), filling in the symbol's info and building
+  *  the corresponding MemberDef tree. The right-hand side is read lazily.
+  */
+ private def readNewDef()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ val sym = symAtAddr(start)
+ val tag = readByte()
+ val end = readEnd()
+
+ def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = {
+ fork.indexParams(tag)
+ readIndexedParams(tag)
+ }
+
+ def readParamss(implicit ctx: Context): List[List[ValDef]] = {
+ collectWhile(nextByte == PARAMS) {
+ readByte()
+ readEnd()
+ readParams[ValDef](PARAM)
+ }
+ }
+
+ def readRhs(implicit ctx: Context) =
+ if (noRhs(end)) EmptyTree
+ else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+
+ def localCtx = localContext(sym)
+
+ def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
+ ta.assignType(
+ untpd.DefDef(
+ sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
+ sym)
+
+ def ta = ctx.typeAssigner
+
+ val name = readName()
+ pickling.println(s"reading def of $name at $start")
+ val tree: MemberDef = tag match {
+ case DEFDEF =>
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val vparamss = readParamss(localCtx)
+ val tpt = readTpt()
+ val typeParams = tparams.map(_.symbol)
+ val valueParamss = ctx.normalizeIfConstructor(
+ vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
+ val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe)
+ sym.info = ctx.methodType(typeParams, valueParamss, resType)
+ DefDef(tparams, vparamss, tpt)
+ case VALDEF =>
+ sym.info = readType()
+ ValDef(sym.asTerm, readRhs(localCtx))
+ case TYPEDEF | TYPEPARAM =>
+ if (sym.isClass)
+ ta.assignType(untpd.TypeDef(sym.name.asTypeName, readTemplate(localCtx)), sym)
+ else {
+ sym.info = readType()
+ TypeDef(sym.asType)
+ }
+ case PARAM =>
+ // A PARAM with a rhs encodes a parameter alias (pickled as a DefDef).
+ val info = readType()
+ if (noRhs(end)) {
+ sym.info = info
+ ValDef(sym.asTerm)
+ }
+ else {
+ sym.setFlag(Method)
+ sym.info = ExprType(info)
+ pickling.println(i"reading param alias $name -> $currentAddr")
+ DefDef(Nil, Nil, TypeTree(info))
+ }
+ }
+ val mods =
+ if (sym.annotations.isEmpty) EmptyModifiers
+ else Modifiers(annotations = sym.annotations.map(_.tree))
+ // NOTE(review): the result of withMods is discarded here — verify that
+ // withMods updates the tree in place for this use.
+ tree.withMods(mods) // record annotations in tree so that tree positions can be filled in.
+ goto(end)
+ setPos(start, tree)
+ }
+
+ /** Read a class template. The class info is set in two steps: first with
+  *  empty parents/self so reading parents can refer to the class, then with
+  *  the normalized parent refs and the self type. Statements are read lazily.
+  */
+ private def readTemplate(implicit ctx: Context): Template = {
+ val start = currentAddr
+ val cls = ctx.owner.asClass
+ def setClsInfo(parents: List[TypeRef], selfType: Type) =
+ cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
+ setClsInfo(Nil, NoType)
+ val localDummy = ctx.newLocalDummy(cls)
+ assert(readByte() == TEMPLATE)
+ val end = readEnd()
+ val tparams = readIndexedParams[TypeDef](TYPEPARAM)
+ val vparams = readIndexedParams[ValDef](PARAM)
+ // Parents end where the optional SELFDEF or the constructor DEFDEF begins.
+ val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) {
+ nextByte match {
+ case APPLY | TYPEAPPLY => readTerm()
+ case _ => readTpt()
+ }
+ }
+ val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
+ val self =
+ if (nextByte == SELFDEF) {
+ readByte()
+ untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
+ }
+ else EmptyValDef
+ setClsInfo(parentRefs, if (self.isEmpty) NoType else self.tpt.tpe)
+ val noInits = fork.indexStats(end)
+ if (noInits) cls.setFlag(NoInits)
+ val constr = readIndexedDef().asInstanceOf[DefDef]
+
+ // Re-interleave type params with their expanded-name aliases so the
+ // statement order matches the original source.
+ def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
+ (tparams, stats) match {
+ case (tparam :: tparams1, (alias: TypeDef) :: stats1)
+ if tparam.name == alias.name.expandedName(cls) =>
+ val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
+ (tparam :: alias :: tas, stats2)
+ case _ =>
+ (tparams, stats)
+ }
+
+ val lazyStats = readLater(end, rdr => implicit ctx => {
+ val stats0 = rdr.readIndexedStats(localDummy, end)
+ val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
+ tparamsAndAliases ++ vparams ++ stats
+ })
+ setPos(start,
+ untpd.Template(constr, parents, self, lazyStats)
+ .withType(localDummy.nonMemberTermRef))
+ }
+
+ /** Read one statement whose symbols have already been indexed; plain
+  *  expressions are read with `exprOwner` as owner.
+  */
+ def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
+ case TYPEDEF | VALDEF | DEFDEF =>
+ readIndexedDef()
+ case IMPORT =>
+ readImport()
+ case PACKAGE =>
+ val start = currentAddr
+ processPackage { (pid, end) => implicit ctx =>
+ setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
+ }
+ case _ =>
+ readTerm()(ctx.withOwner(exprOwner))
+ }
+
+ /** Read an import statement: the qualifier expression followed by a list of
+  *  selectors (RENAMED pairs or plain IMPORTED names).
+  */
+ def readImport()(implicit ctx: Context): Tree = {
+ readByte()
+ readEnd()
+ val expr = readTerm()
+ def readSelectors(): List[untpd.Tree] = nextByte match {
+ case RENAMED =>
+ readByte()
+ readEnd()
+ untpd.Pair(untpd.Ident(readName()), untpd.Ident(readName())) :: readSelectors()
+ case IMPORTED =>
+ readByte()
+ untpd.Ident(readName()) :: readSelectors()
+ case _ =>
+ Nil
+ }
+ Import(expr, readSelectors())
+ }
+
+ /** Read all already-indexed statements up to `end`. */
+ def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
+ until(end)(readIndexedStat(exprOwner))
+
+ /** Index the statements up to `end` (creating their symbols) and then read them. */
+ def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
+ fork.indexStats(end)
+ readIndexedStats(exprOwner, end)
+ }
+
+ /** Read consecutive already-indexed parameter definitions with the given tag. */
+ def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
+ collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
+
+// ------ Reading terms -----------------------------------------------------
+
+ /** Read a term tree. Simple (non-length-prefixed) tags are handled first;
+  *  anything unrecognized falls back to reading a path and wrapping it in
+  *  the appropriate reference/literal tree. The resulting tree's type is
+  *  simplified, and its address is recorded as a temporary position.
+  */
+ def readTerm()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading term ${astTagToString(tag)} at $start")
+
+ // Re-read from `start` as a path type and materialize it as a tree.
+ def readPathTerm(): Tree = {
+ goto(start)
+ readPath() match {
+ case path: TermRef => ref(path)
+ case path: ThisType => This(path.cls)
+ case path: ConstantType => Literal(path.value)
+ }
+ }
+
+ def readSimpleTerm(): Tree = tag match {
+ case IDENT =>
+ untpd.Ident(readName()).withType(readType())
+ case SELECT =>
+ // Selections of a constructor are read in InSuperCall mode.
+ def readQual(name: Name) = {
+ val localCtx =
+ if (name == nme.CONSTRUCTOR) ctx.fresh.addMode(Mode.InSuperCall) else ctx
+ readTerm()(localCtx)
+ }
+ def readRest(name: Name, sig: Signature) = {
+ val unshadowed = if (name.isShadowedName) name.revertShadowed else name
+ val qual = readQual(name)
+ untpd.Select(qual, unshadowed)
+ .withType(TermRef.withSig(qual.tpe.widenIfUnstable, name.asTermName, sig))
+ }
+ readNameSplitSig match {
+ case name: Name => readRest(name, Signature.NotAMethod)
+ case (name: Name, sig: Signature) => readRest(name, sig)
+ }
+
+ case NEW =>
+ New(readTpt())
+ case _ =>
+ readPathTerm()
+ }
+
+ def readLengthTerm(): Tree = {
+ val end = readEnd()
+
+ val result =
+ (tag: @switch) match {
+ case SUPER =>
+ val qual = readTerm()
+ val mixClass = ifBefore(end)(readType().typeSymbol, NoSymbol)
+ val mixName = if (mixClass.exists) mixClass.name.asTypeName else tpnme.EMPTY
+ tpd.Super(qual, mixName, ctx.mode.is(Mode.InSuperCall), mixClass)
+ case APPLY =>
+ val fn = readTerm()
+ val isJava = fn.tpe.isInstanceOf[JavaMethodType]
+ // Vararg arguments to Java methods use JavaSeqLiteral trees.
+ def readArg() = readTerm() match {
+ case SeqLiteral(elems) if isJava => JavaSeqLiteral(elems)
+ case arg => arg
+ }
+ tpd.Apply(fn, until(end)(readArg()))
+ case TYPEAPPLY =>
+ tpd.TypeApply(readTerm(), until(end)(readTpt()))
+ case PAIR =>
+ Pair(readTerm(), readTerm())
+ case TYPED =>
+ Typed(readTerm(), readTpt())
+ case NAMEDARG =>
+ NamedArg(readName(), readTerm())
+ case ASSIGN =>
+ Assign(readTerm(), readTerm())
+ case BLOCK =>
+ // The expression is pickled first; fork it, index/read the stats,
+ // then read the expression from the fork.
+ val exprReader = fork
+ skipTree()
+ val localCtx = ctx.fresh.setNewScope
+ val stats = readStats(ctx.owner, end)(localCtx)
+ val expr = exprReader.readTerm()(localCtx)
+ Block(stats, expr)
+ case IF =>
+ If(readTerm(), readTerm(), readTerm())
+ case LAMBDA =>
+ val meth = readTerm()
+ val tpt = ifBefore(end)(readTpt(), EmptyTree)
+ Closure(Nil, meth, tpt)
+ case MATCH =>
+ Match(readTerm(), readCases(end))
+ case RETURN =>
+ val from = readSymRef()
+ val expr = ifBefore(end)(readTerm(), EmptyTree)
+ Return(expr, Ident(from.termRef))
+ case TRY =>
+ Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))
+ case REPEATED =>
+ SeqLiteral(until(end)(readTerm()))
+ case BIND =>
+ val name = readName()
+ val info = readType()
+ val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, info)
+ symAtAddr(start) = sym
+ Bind(sym, readTerm())
+ case ALTERNATIVE =>
+ Alternative(until(end)(readTerm()))
+ case UNAPPLY =>
+ val fn = readTerm()
+ val implicitArgs =
+ collectWhile(nextByte == IMPLICITarg) {
+ readByte()
+ readTerm()
+ }
+ val patType = readType()
+ val argPats = until(end)(readTerm())
+ UnApply(fn, implicitArgs, argPats, patType)
+ case _ =>
+ readPathTerm()
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
+ tree.overwriteType(tree.tpe.simplified)
+ setPos(start, tree)
+ }
+
+ /** Read a type and wrap it as a TypeTree positioned at the current address;
+  *  a non-existent type yields EmptyTree.
+  */
+ def readTpt()(implicit ctx: Context) = {
+   val addr = currentAddr
+   val tpe = readType()
+   if (!tpe.exists) EmptyTree
+   else setPos(addr, TypeTree(tpe))
+ }
+
+ /** Read consecutive CASEDEF clauses up to `end`; each case body gets its
+  *  own fresh scope.
+  */
+ def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
+   collectWhile(currentAddr != end && nextByte == CASEDEF) {
+     readCase()(ctx.fresh.setNewScope)
+   }
+
+ /** Read one CASEDEF node. The pickled order is pattern, then rhs, then an
+  *  optional guard, whereas CaseDef takes (pattern, guard, body) — hence
+  *  the reordering at construction.
+  */
+ def readCase()(implicit ctx: Context): CaseDef = {
+   val caseStart = currentAddr
+   readByte() // consume the CASEDEF tag
+   val caseEnd = readEnd()
+   val pattern = readTerm()
+   val body = readTerm()
+   val guard = ifBefore(caseEnd)(readTerm(), EmptyTree)
+   setPos(caseStart, CaseDef(pattern, guard, body))
+ }
+
+ /** Defer reading: fork a reader at the current address for later use by
+  *  `op`, then skip this reader ahead to `end` so normal reading resumes
+  *  after the deferred subtree.
+  */
+ def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
+   val forked = fork
+   goto(end)
+   new LazyReader(forked, op)
+ }
+
+// ------ Hooks for positions ------------------------------------------------
+
+ /** If positions are being read, attach to `tree` the temporary position
+  *  stored for `addr` (deltas relative to the as-yet-unknown parent node,
+  *  marked non-synthetic); trees with no stored position get Position(0, 0, 0).
+  *  Returns `tree` for chaining.
+  */
+ def setPos[T <: Tree](addr: Addr, tree: T): T = {
+   if (readPositions) {
+     val rawPos = positions.getOrElse(addr, Position(0, 0, 0))
+     tree.setPosUnchecked(rawPos)
+   }
+   tree
+ }
+ }
+
+ /** Replace the temporary delta-encoded position of `tree` by an absolute
+  *  one derived from `parentPos`; a tree without a stored position simply
+  *  inherits the parent position.
+  *  NOTE(review): the start delta is decoded via `offsetToInt` while the
+  *  end delta is used raw — presumably mirroring how the pickler encodes
+  *  the two deltas; confirm against PositionPickler.
+  */
+ private def setNormalized(tree: Tree, parentPos: Position): Unit = {
+   val absolute =
+     if (!tree.pos.exists) parentPos
+     else Position(parentPos.start + offsetToInt(tree.pos.start), parentPos.end - tree.pos.end)
+   tree.setPosUnchecked(absolute)
+ }
+
+ /** Walk everything reachable from `x`, turning each temporary position
+  *  into an absolute one relative to `parentPos`.
+  */
+ def normalizePos(x: Any, parentPos: Position)(implicit ctx: Context): Unit =
+   traverse(x, parentPos, (tree, pos) => setNormalized(tree, pos))
+
+ /** A lazy slot that, when forced, runs `op` on the forked `reader`.
+  *  Dependent function types are permitted during the read, and the
+  *  positions of the result are normalized against the recorded parent
+  *  position before it is returned.
+  */
+ class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] with DeferredPosition {
+   def complete(implicit ctx: Context): T = {
+     pickling.println(i"starting to read at ${reader.reader.currentAddr}")
+     val result = op(reader)(ctx.addMode(Mode.AllowDependentFunctions))
+     normalizePos(result, parentPos)
+     result
+   }
+ }
+
+ /** A lazy annotation for `sym` whose tree is read on demand from `reader`,
+  *  with its positions normalized once read.
+  */
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
+   extends LazyAnnotation(sym) with DeferredPosition {
+   def complete(implicit ctx: Context) = {
+     val annotTree = reader.readTerm()
+     normalizePos(annotTree, parentPos)
+     annotTree
+   }
+ }
+}