author     Dmitry Petrashko <dmitry.petrashko@gmail.com>  2015-04-09 16:57:55 +0200
committer  Dmitry Petrashko <dmitry.petrashko@gmail.com>  2015-04-09 16:57:55 +0200
commit     afa630a78b4f2cd9bd799b5a0199b99548f18aaa (patch)
tree       9da8ad19573c8c913b47a232a3b57662aa660ec1 /src/dotty/tools/dotc/core
parent     33f5d62bcbc6ec649e739958fc19c4524305471c (diff)
Remove trailing spaces in Dotty source.
Diffstat (limited to 'src/dotty/tools/dotc/core')
-rw-r--r--  src/dotty/tools/dotc/core/Constraint.scala | 54
-rw-r--r--  src/dotty/tools/dotc/core/ConstraintHandling.scala | 38
-rw-r--r--  src/dotty/tools/dotc/core/Contexts.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Definitions.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/Denotations.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/Flags.scala | 10
-rw-r--r--  src/dotty/tools/dotc/core/NameOps.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/OrderingConstraint.scala | 192
-rw-r--r--  src/dotty/tools/dotc/core/Scopes.scala | 8
-rw-r--r--  src/dotty/tools/dotc/core/Skolemization.scala | 30
-rw-r--r--  src/dotty/tools/dotc/core/SymDenotations.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/TypeApplications.scala | 8
-rw-r--r--  src/dotty/tools/dotc/core/TypeComparer.scala | 32
-rw-r--r--  src/dotty/tools/dotc/core/TypeOps.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/TyperState.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Types.scala | 58
-rw-r--r--  src/dotty/tools/dotc/core/pickling/ClassfileParser.scala | 12
-rw-r--r--  src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala | 10
-rw-r--r--  src/dotty/tools/dotc/core/pickling/NameBuffer.scala | 20
-rw-r--r--  src/dotty/tools/dotc/core/pickling/PositionPickler.scala | 16
-rw-r--r--  src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyBuffer.scala | 50
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyName.scala | 10
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyPickler.scala | 14
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyPrinter.scala | 28
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyReader.scala | 54
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala | 32
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TreeBuffer.scala | 34
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TreePickler.scala | 92
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala | 184
-rw-r--r--  src/dotty/tools/dotc/core/pickling/UnPickler.scala | 4
31 files changed, 511 insertions, 511 deletions
diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala
index bc0d476a9..5a758f144 100644
--- a/src/dotty/tools/dotc/core/Constraint.scala
+++ b/src/dotty/tools/dotc/core/Constraint.scala
@@ -12,16 +12,16 @@ import config.Printers._
/** Constraint over undetermined type parameters. Constraints are built
* over values of the following types:
- *
+ *
* - PolyType A constraint constrains the type parameters of a set of PolyTypes
* - PolyParam The parameters of the constrained polytypes
- * - TypeVar Every constrained parameter might be associated with a TypeVar
+ * - TypeVar Every constrained parameter might be associated with a TypeVar
* that has the PolyParam as origin.
*/
abstract class Constraint extends Showable {
-
+
type This <: Constraint
-
+
/** Does the constraint's domain contain the type parameters of `pt`? */
def contains(pt: PolyType): Boolean
@@ -30,34 +30,34 @@ abstract class Constraint extends Showable {
/** Does this constraint contain the type variable `tvar` and is it uninstantiated? */
def contains(tvar: TypeVar): Boolean
-
+
/** The constraint entry for given type parameter `param`, or NoType if `param` is not part of
* the constraint domain.
*/
def entry(param: PolyParam): Type
-
+
/** The type variable corresponding to parameter `param`, or
* NoType, if `param` is not constrained or is not paired with a type variable.
*/
def typeVarOfParam(param: PolyParam): Type
-
+
/** Is it known that `param1 <:< param2`? */
def isLess(param1: PolyParam, param2: PolyParam): Boolean
- /** The parameters that are known to be smaller wrt <: than `param` */
+ /** The parameters that are known to be smaller wrt <: than `param` */
def lower(param: PolyParam): List[PolyParam]
-
- /** The parameters that are known to be greater wrt <: than `param` */
+
+ /** The parameters that are known to be greater wrt <: than `param` */
def upper(param: PolyParam): List[PolyParam]
-
- /** lower(param) \ lower(butNot) */
+
+ /** lower(param) \ lower(butNot) */
def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam]
-
- /** upper(param) \ upper(butNot) */
+
+ /** upper(param) \ upper(butNot) */
def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam]
/** The constraint bounds for given type parameter `param`.
- * Poly params that are known to be smaller or greater than `param`
+ * Poly params that are known to be smaller or greater than `param`
* are not contained in the return bounds.
* @pre `param` is not part of the constraint domain.
*/
@@ -65,16 +65,16 @@ abstract class Constraint extends Showable {
/** The lower bound of `param` including all known-to-be-smaller parameters */
def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type
-
+
/** The upper bound of `param` including all known-to-be-greater parameters */
def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type
-
+
/** The bounds of `param` including all known-to-be-smaller and -greater parameters */
def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds
-
+
/** A new constraint which is derived from this constraint by adding
* entries for all type parameters of `poly`.
- * @param tvars A list of type variables associated with the params,
+ * @param tvars A list of type variables associated with the params,
* or Nil if the constraint will just be checked for
* satisfiability but will not be solved to give instances of
* type variables.
@@ -84,15 +84,15 @@ abstract class Constraint extends Showable {
/** A new constraint which is derived from this constraint by updating
* the entry for parameter `param` to `tp`.
* `tp` can be one of the following:
- *
+ *
* - A TypeBounds value, indicating new constraint bounds
* - Another type, indicating a solution for the parameter
- *
- * @pre `this contains param`.
+ *
+ * @pre `this contains param`.
*/
def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This
-
- /** A constraint that includes the relationship `p1 <: p2`.
+
+ /** A constraint that includes the relationship `p1 <: p2`.
* `<:` relationships between parameters ("edges") are propagated, but
* non-parameter bounds are left alone.
*/
@@ -113,17 +113,17 @@ abstract class Constraint extends Showable {
/** Narrow one of the bounds of type parameter `param`
* If `isUpper` is true, ensure that `param <: bound`, otherwise ensure
- * that `param >: bound`.
+ * that `param >: bound`.
*/
def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This
-
+
/** Is entry associated with `pt` removable?
* @param removedParam The index of a parameter which is still present in the
* entry array, but is going to be removed at the same step,
* or -1 if no such parameter exists.
*/
def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean
-
+
/** A new constraint with all entries coming from `pt` removed. */
def remove(pt: PolyType)(implicit ctx: Context): This
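
The ordering queries above (`lower`, `upper`, `exclusiveLower`, `exclusiveUpper`) are easiest to see on a toy example. Below is a minimal standalone sketch of their semantics; `Param` and the hard-coded edge map are hypothetical stand-ins for PolyParam and the real constraint state, not the compiler's implementation.

  object ConstraintOrderSketch {
    type Param = String

    // Known `<:` edges between constrained parameters: A <: B, A <: C, B <: C.
    val less: Map[Param, Set[Param]] = Map("A" -> Set("B", "C"), "B" -> Set("C"))

    def isLess(p1: Param, p2: Param): Boolean =
      less.getOrElse(p1, Set.empty).contains(p2)

    // Parameters known to be smaller wrt <: than `param`.
    def lower(param: Param): List[Param] =
      less.keys.toList.filter(isLess(_, param))

    // lower(param) \ lower(butNot), as specified for `exclusiveLower`.
    def exclusiveLower(param: Param, butNot: Param): List[Param] =
      lower(param).filterNot(isLess(_, butNot))

    def main(args: Array[String]): Unit = {
      println(lower("C"))               // List(A, B)
      println(exclusiveLower("C", "B")) // List(B): A is dropped since A <: B
    }
  }
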
diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala
index 796960337..8caacfb2f 100644
--- a/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -10,28 +10,28 @@ import config.Printers._
/** Methods for adding constraints and solving them.
*
* What goes into a Constraint as opposed to a ConstrainHandler?
- *
+ *
* Constraint code is purely functional: Operations get constraints and produce new ones.
- * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
+ * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
* elsewhere.
- *
+ *
* By comparison: Constraint handlers are parts of type comparers and can use their functionality.
* Constraint handlers update the current constraint as a side effect.
*/
trait ConstraintHandling {
-
+
implicit val ctx: Context
-
+
protected def isSubType(tp1: Type, tp2: Type): Boolean
-
+
val state: TyperState
import state.constraint
-
+
private var addConstraintInvocations = 0
/** If the constraint is frozen we cannot add new bounds to the constraint. */
protected var frozenConstraint = false
-
+
private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean =
!constraint.contains(param) || {
val c1 = constraint.narrowBound(param, bound, isUpper)
@@ -57,7 +57,7 @@ trait ConstraintHandling {
constr.println(i"added $description = $res")
res
}
-
+
protected def addLowerBound(param: PolyParam, bound: Type): Boolean = {
def description = i"constraint $param >: $bound to\n$constraint"
constr.println(i"adding $description")
@@ -68,11 +68,11 @@ trait ConstraintHandling {
constr.println(i"added $description = $res")
res
}
-
+
protected def addLess(p1: PolyParam, p2: PolyParam): Boolean = {
def description = i"ordering $p1 <: $p2 to\n$constraint"
val res =
- if (constraint.isLess(p2, p1)) unify(p2, p1)
+ if (constraint.isLess(p2, p1)) unify(p2, p1)
else {
val down1 = p1 :: constraint.exclusiveLower(p1, p2)
val up2 = p2 :: constraint.exclusiveUpper(p2, p1)
@@ -86,7 +86,7 @@ trait ConstraintHandling {
constr.println(i"added $description = $res")
res
}
-
+
/** Make p2 = p1, transfer all bounds of p2 to p1
* @pre less(p1)(p2)
*/
@@ -100,10 +100,10 @@ trait ConstraintHandling {
val lo = bounds.lo
val hi = bounds.hi
isSubType(lo, hi) &&
- down.forall(addOneBound(_, hi, isUpper = true)) &&
+ down.forall(addOneBound(_, hi, isUpper = true)) &&
up.forall(addOneBound(_, lo, isUpper = false))
}
-
+
protected final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
val saved = frozenConstraint
frozenConstraint = true
@@ -164,20 +164,20 @@ trait ConstraintHandling {
else {
val saved = constraint
try
- c2.forallParams(p =>
+ c2.forallParams(p =>
c1.contains(p) &&
c2.upper(p).forall(c1.isLess(p, _)) &&
isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)))
finally constraint = saved
}
-
+
/** The current bounds of type parameter `param` */
final def bounds(param: PolyParam): TypeBounds = constraint.entry(param) match {
case bounds: TypeBounds => bounds
case _ => param.binder.paramBounds(param.paramNum)
}
-
- /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
+
+ /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
* and propagate all bounds.
* @param tvars See Constraint#add
*/
@@ -223,7 +223,7 @@ trait ConstraintHandling {
finally addConstraintInvocations -= 1
}
}
-
+
/** Check that constraint is fully propagated. See comment in Config.checkConstraintsPropagated */
def checkPropagated(msg: => String)(result: Boolean): Boolean = {
if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) {
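
The `isSubTypeWhenFrozen` helper above illustrates a general pattern: temporarily freeze the constraint so a subtype test cannot add new bounds, and restore the previous state in a `finally`. A self-contained sketch of just that pattern, with a trivial stand-in for the real `isSubType`:

  object FrozenCheckSketch {
    private var frozenConstraint = false

    // Stand-in for TypeComparer#isSubType: only narrows bounds when not frozen.
    private def isSubType(tp1: String, tp2: String): Boolean = {
      if (!frozenConstraint) println(s"may record new bound: $tp1 <: $tp2")
      tp1 == tp2 || tp2 == "Any"
    }

    def isSubTypeWhenFrozen(tp1: String, tp2: String): Boolean = {
      val saved = frozenConstraint
      frozenConstraint = true
      try isSubType(tp1, tp2)
      finally frozenConstraint = saved // restore even if the comparison throws
    }

    def main(args: Array[String]): Unit =
      println(isSubTypeWhenFrozen("Int", "Any")) // true, and nothing is recorded
  }
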
diff --git a/src/dotty/tools/dotc/core/Contexts.scala b/src/dotty/tools/dotc/core/Contexts.scala
index 645aca601..61d4e9874 100644
--- a/src/dotty/tools/dotc/core/Contexts.scala
+++ b/src/dotty/tools/dotc/core/Contexts.scala
@@ -170,7 +170,7 @@ object Contexts {
if (implicitsCache == null )
implicitsCache = {
val implicitRefs: List[TermRef] =
- if (isClassDefContext)
+ if (isClassDefContext)
try owner.thisType.implicitMembers
catch {
case ex: CyclicReference => Nil
diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala
index 89e4bd371..7accf9148 100644
--- a/src/dotty/tools/dotc/core/Definitions.scala
+++ b/src/dotty/tools/dotc/core/Definitions.scala
@@ -175,14 +175,14 @@ class Definitions {
def ObjectMethods = List(Object_eq, Object_ne, Object_synchronized, Object_clone,
Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI)
-
+
/** Dummy method needed by elimByName */
lazy val dummyApply = newPolyMethod(
OpsPackageClass, nme.dummyApply, 1,
pt => MethodType(List(FunctionType(Nil, PolyParam(pt, 0))), PolyParam(pt, 0)))
-
+
/** Method representing a throw */
- lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
+ lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
MethodType(List(ThrowableType), NothingType))
lazy val NothingClass: ClassSymbol = newCompleteClassSymbol(
diff --git a/src/dotty/tools/dotc/core/Denotations.scala b/src/dotty/tools/dotc/core/Denotations.scala
index 849e934f0..f038e8f2f 100644
--- a/src/dotty/tools/dotc/core/Denotations.scala
+++ b/src/dotty/tools/dotc/core/Denotations.scala
@@ -476,14 +476,14 @@ object Denotations {
/** The version of this SingleDenotation that was valid in the first phase
* of this run.
*/
- def initial: SingleDenotation =
+ def initial: SingleDenotation =
if (validFor == Nowhere) this
else {
var current = nextInRun
while (current.validFor.code > this.myValidFor.code) current = current.nextInRun
current
}
-
+
def history: List[SingleDenotation] = {
val b = new ListBuffer[SingleDenotation]
var current = initial
@@ -497,7 +497,7 @@ object Denotations {
/** Invalidate all caches and fields that depend on base classes and their contents */
def invalidateInheritedInfo(): Unit = ()
-
+
/** Move validity period of this denotation to a new run. Throw a StaleSymbol error
* if denotation is no longer valid.
*/
diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala
index a60f7e70d..f62c3cae8 100644
--- a/src/dotty/tools/dotc/core/Flags.scala
+++ b/src/dotty/tools/dotc/core/Flags.scala
@@ -300,7 +300,7 @@ object Flags {
/** Method is assumed to be stable */
final val Stable = termFlag(24, "<stable>")
-
+
/** A case parameter accessor */
final val CaseAccessor = termFlag(25, "<caseaccessor>")
@@ -499,7 +499,7 @@ object Flags {
/** These flags are pickled */
final val PickledFlags = flagRange(FirstFlag, FirstNotPickledFlag)
-
+
final val AllFlags = flagRange(FirstFlag, MaxFlag)
/** An abstract class or a trait */
@@ -531,10 +531,10 @@ object Flags {
/** A type parameter or type parameter accessor */
final val TypeParamOrAccessor = TypeParam | TypeParamAccessor
-
- /** If symbol of a type alias has these flags, prefer the alias */
+
+ /** If symbol of a type alias has these flags, prefer the alias */
final val AliasPreferred = TypeParam | TypeArgument | ExpandedName
-
+
/** A covariant type parameter instance */
final val LocalCovariant = allOf(Local, Covariant)
diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala
index 1834dbe64..35607cf74 100644
--- a/src/dotty/tools/dotc/core/NameOps.scala
+++ b/src/dotty/tools/dotc/core/NameOps.scala
@@ -130,7 +130,7 @@ object NameOps {
/** If name ends in module class suffix, drop it */
def stripModuleClassSuffix: Name =
if (isModuleClassName) name dropRight MODULE_SUFFIX.length else name
-
+
/** Append a suffix so that this name does not clash with another name in the same scope */
def avoidClashName: TermName = (name ++ AVOID_CLASH_SUFFIX).toTermName
@@ -161,7 +161,7 @@ object NameOps {
val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
if (idx < 0) name else (name drop (idx + nme.EXPAND_SEPARATOR.length)).asInstanceOf[N]
}
-
+
def expandedPrefix: N = {
val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
assert(idx >= 0)
diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala
index 0c39a26a6..21d003451 100644
--- a/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -16,10 +16,10 @@ object OrderingConstraint {
/** The type of `OrderingConstraint#boundsMap` */
type ParamBounds = SimpleMap[PolyType, Array[Type]]
-
+
/** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
type ParamOrdering = SimpleMap[PolyType, Array[List[PolyParam]]]
-
+
/** A new constraint with given maps */
private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
val result = new OrderingConstraint(boundsMap, lowerMap, upperMap)
@@ -27,29 +27,29 @@ object OrderingConstraint {
ctx.runInfo.recordConstraintSize(result, result.boundsMap.size)
result
}
-
+
/** A lens for updating a single entry array in one of the three constraint maps */
abstract class ConstraintLens[T <: AnyRef: ClassTag] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[T]
+ def entries(c: OrderingConstraint, poly: PolyType): Array[T]
def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[T])(implicit ctx: Context): OrderingConstraint
def initial: T
-
+
def apply(c: OrderingConstraint, poly: PolyType, idx: Int) = {
val es = entries(c, poly)
if (es == null) initial else es(idx)
}
-
+
/** The `current` constraint but with the entry for `param` updated to `entry`.
* `current` is used linearly. If it is different from `prev` it is
* known to be dead after the call. Hence it is OK to update destructively
* parts of `current` which are not shared by `prev`.
*/
- def update(prev: OrderingConstraint, current: OrderingConstraint,
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
poly: PolyType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = {
var es = entries(current, poly)
if (es != null && (es(idx) eq entry)) current
else {
- val result =
+ val result =
if (es == null) {
es = Array.fill(poly.paramNames.length)(initial)
updateEntries(current, poly, es)
@@ -64,40 +64,40 @@ object OrderingConstraint {
result
}
}
-
- def update(prev: OrderingConstraint, current: OrderingConstraint,
+
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
param: PolyParam, entry: T)(implicit ctx: Context): OrderingConstraint =
update(prev, current, param.binder, param.paramNum, entry)
-
- def map(prev: OrderingConstraint, current: OrderingConstraint,
- poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
+
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
update(prev, current, poly, idx, f(apply(current, poly, idx)))
- def map(prev: OrderingConstraint, current: OrderingConstraint,
- param: PolyParam, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ param: PolyParam, f: T => T)(implicit ctx: Context): OrderingConstraint =
map(prev, current, param.binder, param.paramNum, f)
}
val boundsLens = new ConstraintLens[Type] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
+ def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
c.boundsMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap)
def initial = NoType
}
-
+
val lowerLens = new ConstraintLens[List[PolyParam]] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
c.lowerMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap)
def initial = Nil
}
val upperLens = new ConstraintLens[List[PolyParam]] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
c.upperMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries))
def initial = Nil
}
@@ -105,7 +105,7 @@ object OrderingConstraint {
import OrderingConstraint._
-/** Constraint over undetermined type parameters that keeps separate maps to
+/** Constraint over undetermined type parameters that keeps separate maps to
* reflect parameter orderings.
* @param boundsMap a map from PolyType to arrays.
* Each array contains twice the number of entries as there are type parameters
@@ -115,23 +115,23 @@ import OrderingConstraint._
* An instantiated type parameter is represented by having its instance type in
* the corresponding array entry. The dual use of arrays for poly params
* and typevars is to save space and hopefully gain some speed.
- *
+ *
* @param lowerMap a map from PolyTypes to arrays. Each array entry corresponds
* to a parameter P of the polytype; it contains all constrained parameters
- * Q that are known to be smaller than P, i.e. Q <: P.
+ * Q that are known to be smaller than P, i.e. Q <: P.
* @param upperMap a map from PolyTypes to arrays. Each array entry corresponds
* to a parameter P of the polytype; it contains all constrained parameters
- * Q that are known to be greater than P, i.e. P <: Q.
+ * Q that are known to be greater than P, i.e. P <: Q.
*/
-class OrderingConstraint(private val boundsMap: ParamBounds,
- private val lowerMap : ParamOrdering,
+class OrderingConstraint(private val boundsMap: ParamBounds,
+ private val lowerMap : ParamOrdering,
private val upperMap : ParamOrdering) extends Constraint {
-
+
type This = OrderingConstraint
-
-
+
+
// ----------- Basic indices --------------------------------------------------
-
+
/** The number of type parameters in the given entry array */
private def paramCount(entries: Array[Type]) = entries.length >> 1
@@ -145,7 +145,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
if (entries == null) NoType
else entries(param.paramNum)
}
-
+
// ----------- Contains tests --------------------------------------------------
def contains(pt: PolyType): Boolean = boundsMap(pt) != null
@@ -163,42 +163,42 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
private def isBounds(tp: Type) = tp.isInstanceOf[TypeBounds]
-
+
// ---------- Dependency handling ----------------------------------------------
-
+
def lower(param: PolyParam): List[PolyParam] = lowerLens(this, param.binder, param.paramNum)
def upper(param: PolyParam): List[PolyParam] = upperLens(this, param.binder, param.paramNum)
-
+
def minLower(param: PolyParam): List[PolyParam] = {
val all = lower(param)
all.filterNot(p => all.exists(isLess(p, _)))
}
-
+
def minUpper(param: PolyParam): List[PolyParam] = {
val all = upper(param)
all.filterNot(p => all.exists(isLess(_, p)))
}
-
+
def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam] =
lower(param).filterNot(isLess(_, butNot))
-
+
def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam] =
upper(param).filterNot(isLess(butNot, _))
-
+
// ---------- Info related to PolyParams -------------------------------------------
def isLess(param1: PolyParam, param2: PolyParam): Boolean =
upper(param1).contains(param2)
- def nonParamBounds(param: PolyParam): TypeBounds =
+ def nonParamBounds(param: PolyParam): TypeBounds =
entry(param).asInstanceOf[TypeBounds]
-
+
def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type =
(nonParamBounds(param).lo /: minLower(param))(_ | _)
- def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type =
+ def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type =
(nonParamBounds(param).hi /: minUpper(param))(_ & _)
-
+
def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds =
nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param))
@@ -209,17 +209,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
val tvar = typeVar(entries, param.paramNum)
if (tvar != null) tvar else NoType
}
- }
+ }
// ---------- Adding PolyTypes --------------------------------------------------
-
+
/** The list of parameters P such that, for a fresh type parameter Q:
- *
+ *
* Q <: tp implies Q <: P and isUpper = true, or
* tp <: Q implies P <: Q and isUpper = false
*/
def dependentParams(tp: Type, isUpper: Boolean): List[PolyParam] = tp match {
- case param: PolyParam if contains(param) =>
+ case param: PolyParam if contains(param) =>
param :: (if (isUpper) upper(param) else lower(param))
case tp: AndOrType =>
val ps1 = dependentParams(tp.tp1, isUpper)
@@ -228,7 +228,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
case _ =>
Nil
}
-
+
/** The bound type `tp` without constrained parameters which are clearly
* dependent. A parameter in an upper bound is clearly dependent if it appears
* in a hole of a context H given by:
@@ -237,7 +237,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* H & T
* T & H
*
- * (the idea is that a parameter P in a H context is guaranteed to be a supertype of the
+ * (the idea is that a parameter P in a H context is guaranteed to be a supertype of the
* bounded parameter.)
* Analogously, a parameter in a lower bound is clearly dependent if it appears
* in a hole of a context H given by:
@@ -245,18 +245,18 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* L = []
* L | T
* T | L
- *
+ *
* "Clearly dependent" is not synonymous with "dependent" in the sense
* it is defined in `dependentParams`. Dependent parameters are handled
* in `updateEntry`. The idea of stripping off clearly dependent parameters
- * and to handle them separately is for efficiency, so that type expressions
+ * and to handle them separately is for efficiency, so that type expressions
* used as bounds become smaller.
- *
+ *
* @param isUpper If true, `bound` is an upper bound, else a lower bound.
*/
- private def stripParams(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ private def stripParams(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
isUpper: Boolean)(implicit ctx: Context): Type = tp match {
- case param: PolyParam if contains(param) =>
+ case param: PolyParam if contains(param) =>
if (!paramBuf.contains(param)) paramBuf += param
NoType
case tp: AndOrType if isUpper == tp.isAnd =>
@@ -268,17 +268,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
else tp2
case _ =>
tp
- }
-
+ }
+
/** The bound type `tp` without clearly dependent parameters.
* A top or bottom type if the type consists only of dependent parameters.
* @param isUpper If true, `bound` is an upper bound, else a lower bound.
*/
- private def normalizedType(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
- isUpper: Boolean)(implicit ctx: Context): Type =
+ private def normalizedType(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ isUpper: Boolean)(implicit ctx: Context): Type =
stripParams(tp, paramBuf, isUpper)
.orElse(if (isUpper) defn.AnyType else defn.NothingType)
-
+
def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This = {
assert(!contains(poly))
val nparams = poly.paramNames.length
@@ -287,7 +287,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
tvars.copyToArray(entries1, nparams)
newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap).init(poly)
}
-
+
/** Split dependent parameters off the bounds for parameters in `poly`.
* Update all bounds to be normalized and update ordering to account for
* dependent parameters.
@@ -311,9 +311,9 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
if (Config.checkConstraintsNonCyclic) checkNonCyclic()
current
}
-
+
// ---------- Updates ------------------------------------------------------------
-
+
/** Add the fact `param1 <: param2` to the constraint `current` and propagate
* `<:<` relationships between parameters ("edges") but not bounds.
*/
@@ -331,31 +331,31 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def addLess(param1: PolyParam, param2: PolyParam)(implicit ctx: Context): This =
order(this, param1, param2)
-
+
def updateEntry(current: This, param: PolyParam, tp: Type)(implicit ctx: Context): This = {
var current1 = boundsLens.update(this, current, param, tp)
tp match {
case TypeBounds(lo, hi) =>
- for (p <- dependentParams(lo, isUpper = false))
+ for (p <- dependentParams(lo, isUpper = false))
current1 = order(current1, p, param)
- for (p <- dependentParams(hi, isUpper = true))
+ for (p <- dependentParams(hi, isUpper = true))
current1 = order(current1, param, p)
case _ =>
}
current1
}
-
+
def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This =
updateEntry(this, param, tp)
-
+
def unify(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This = {
val p1Bounds = (nonParamBounds(p1) & nonParamBounds(p2)).substParam(p2, p1)
updateEntry(p1, p1Bounds).replace(p2, p1)
}
-
+
def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This = {
- val oldBounds @ TypeBounds(lo, hi) = nonParamBounds(param)
- val newBounds =
+ val oldBounds @ TypeBounds(lo, hi) = nonParamBounds(param)
+ val newBounds =
if (isUpper) oldBounds.derivedTypeBounds(lo, hi & bound)
else oldBounds.derivedTypeBounds(lo | bound, hi)
updateEntry(param, newBounds)
@@ -368,40 +368,40 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* of the parameter elsewhere in the constraint by type `tp`, or a conservative
* approximation of it if that is needed to avoid cycles.
* Occurrences nested inside a refinement or prefix are not affected.
- *
+ *
* The reason we need to substitute top-level occurrences of the parameter
* is to deal with situations like the following. Say we have in the constraint
- *
+ *
* P <: Q & String
* Q
- *
+ *
* and we replace Q with P. Then substitution gives
- *
+ *
* P <: P & String
- *
+ *
* this would be a cyclic constraint and is therefore changed by `normalize` and
* `recombine` below to
- *
+ *
* P <: String
- *
- * approximating the RHS occurrence of P with Any. Without the substitution we
+ *
+ * approximating the RHS occurrence of P with Any. Without the substitution we
* would not find out where we need to approximate. Occurrences of parameters
* that are not top-level are not affected.
*/
def replace(param: PolyParam, tp: Type)(implicit ctx: Context): OrderingConstraint = {
val replacement = tp.dealias.stripTypeVar
- if (param == replacement) this
+ if (param == replacement) this
else {
assert(replacement.isValueType)
val poly = param.binder
val idx = param.paramNum
-
- def removeParam(ps: List[PolyParam]) =
+
+ def removeParam(ps: List[PolyParam]) =
ps.filterNot(p => p.binder.eq(poly) && p.paramNum == idx)
-
+
def replaceParam(tp: Type, atPoly: PolyType, atIdx: Int) = tp match {
case bounds @ TypeBounds(lo, hi) =>
-
+
def recombine(andor: AndOrType, op: (Type, Boolean) => Type, isUpper: Boolean): Type = {
val tp1 = op(andor.tp1, isUpper)
val tp2 = op(andor.tp2, isUpper)
@@ -409,7 +409,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
else if (andor.isAnd) tp1 & tp2
else tp1 | tp2
}
-
+
def normalize(tp: Type, isUpper: Boolean): Type = tp match {
case p: PolyParam if p.binder == atPoly && p.paramNum == atIdx =>
if (isUpper) defn.AnyType else defn.NothingType
@@ -422,12 +422,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper)
case _ => tp
}
-
+
bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true))
case _ => tp
}
-
- var current =
+
+ var current =
if (isRemovable(poly, idx)) remove(poly) else updateEntry(param, replacement)
current.foreachParam {(p, i) =>
current = boundsLens.map(this, current, p, i, replaceParam(_, p, i))
@@ -438,7 +438,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
}
- def remove(pt: PolyType)(implicit ctx: Context): This =
+ def remove(pt: PolyType)(implicit ctx: Context): This =
newConstraint(boundsMap.remove(pt), lowerMap.remove(pt), upperMap.remove(pt))
def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean = {
@@ -461,7 +461,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def domainPolys: List[PolyType] = boundsMap.keys
- def domainParams: List[PolyParam] =
+ def domainParams: List[PolyParam] =
for {
(poly, entries) <- boundsMap.toList
n <- 0 until paramCount(entries)
@@ -475,7 +475,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
true
}
-
+
def foreachParam(p: (PolyType, Int) => Unit): Unit =
boundsMap.foreachBinding { (poly, entries) =>
0.until(poly.paramNames.length).foreach(p(poly, _))
@@ -513,17 +513,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def checkNonCyclic()(implicit ctx: Context): Unit =
domainParams.foreach(checkNonCyclic)
-
+
private def checkNonCyclic(param: PolyParam)(implicit ctx: Context): Unit =
assert(!isLess(param, param), i"cyclic constraint involving $param in $this")
-
+
// ---------- toText -----------------------------------------------------
override def toText(printer: Printer): Text = {
def entryText(tp: Type) = tp match {
- case tp: TypeBounds =>
+ case tp: TypeBounds =>
tp.toText(printer)
- case _ =>
+ case _ =>
" := " ~ tp.toText(printer)
}
val indent = 3
@@ -547,7 +547,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
ups = minUpper(param)
if ups.nonEmpty
}
- yield
+ yield
(" " * indent) ~ param.toText(printer) ~ " <: " ~
Text(ups.map(_.toText(printer)), ", ")
Text(deps, "\n")
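
The `ConstraintLens` machinery above factors the three maps (`boundsMap`, `lowerMap`, `upperMap`) behind one read/update abstraction. The following simplified sketch shows the idea with ordinary immutable Maps and copy-on-write arrays; the linear-update optimization and the `prev`/`current` aliasing check of the real code are deliberately omitted.

  object LensSketch {
    final case class Constr(bounds: Map[String, Array[Int]])

    abstract class Lens {
      def entries(c: Constr, key: String): Array[Int]
      def updateEntries(c: Constr, key: String, es: Array[Int]): Constr

      def apply(c: Constr, key: String, idx: Int): Int =
        entries(c, key)(idx)

      // Functional update: clone the entry array so the old constraint survives.
      def update(c: Constr, key: String, idx: Int, value: Int): Constr = {
        val es = entries(c, key).clone()
        es(idx) = value
        updateEntries(c, key, es)
      }
    }

    val boundsLens: Lens = new Lens {
      def entries(c: Constr, key: String) = c.bounds(key)
      def updateEntries(c: Constr, key: String, es: Array[Int]) =
        Constr(c.bounds.updated(key, es))
    }

    def main(args: Array[String]): Unit = {
      val c0 = Constr(Map("pt" -> Array(1, 2, 3)))
      val c1 = boundsLens.update(c0, "pt", 1, 42)
      println(boundsLens(c0, "pt", 1)) // 2, the original is untouched
      println(boundsLens(c1, "pt", 1)) // 42
    }
  }
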
diff --git a/src/dotty/tools/dotc/core/Scopes.scala b/src/dotty/tools/dotc/core/Scopes.scala
index 70bcbdee6..ad9ba4201 100644
--- a/src/dotty/tools/dotc/core/Scopes.scala
+++ b/src/dotty/tools/dotc/core/Scopes.scala
@@ -93,7 +93,7 @@ object Scopes {
/** Lookup next entry with same name as this one */
def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry
-
+
/** Lookup a symbol */
final def lookup(name: Name)(implicit ctx: Context): Symbol = {
val e = lookupEntry(name)
@@ -138,9 +138,9 @@ object Scopes {
}
def implicitDecls(implicit ctx: Context): List[TermRef] = Nil
-
+
def openForMutations: MutableScope = unsupported("openForMutations")
-
+
final def toText(printer: Printer): Text = printer.toText(this)
}
@@ -376,7 +376,7 @@ object Scopes {
}
syms
}
-
+
override def openForMutations: MutableScope = this
}
diff --git a/src/dotty/tools/dotc/core/Skolemization.scala b/src/dotty/tools/dotc/core/Skolemization.scala
index 1d0067a4f..fb47cb62a 100644
--- a/src/dotty/tools/dotc/core/Skolemization.scala
+++ b/src/dotty/tools/dotc/core/Skolemization.scala
@@ -5,42 +5,42 @@ import Symbols._, Types._, Contexts._
import collection.mutable
/** Methods to add and remove skolem types.
- *
- * Skolem types are generated when comparing refinements.
+ *
+ * Skolem types are generated when comparing refinements.
* A skolem type is simply a fresh singleton type that has a given type
* as underlying type.
- * Two skolem types are equal if they refer to the same underlying type.
+ * Two skolem types are equal if they refer to the same underlying type.
* To avoid unsoundness, skolem types have to be kept strictly local to the
* comparison, they are not allowed to escape the lifetime of a comparison
- * by surviving in a context or in GADT bounds.
+ * by surviving in a context or in GADT bounds.
*/
trait Skolemization {
-
+
implicit val ctx: Context
protected var skolemsOutstanding = false
-
+
def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match {
- case tp: SingletonType if tp.isStable =>
+ case tp: SingletonType if tp.isStable =>
tp
- case tp: ValueType =>
+ case tp: ValueType =>
skolemsOutstanding = true
SkolemType(tp)
- case tp: TypeProxy =>
+ case tp: TypeProxy =>
ensureStableSingleton(tp.underlying)
}
-
+
/** Approximate a type `tp` with a type that does not contain skolem types.
* @param toSuper if true, return the smallest supertype of `tp` with this property
* else return the largest subtype.
*/
- final def deSkolemize(tp: Type, toSuper: Boolean): Type =
- if (skolemsOutstanding) deSkolemize(tp, if (toSuper) 1 else -1, Set())
+ final def deSkolemize(tp: Type, toSuper: Boolean): Type =
+ if (skolemsOutstanding) deSkolemize(tp, if (toSuper) 1 else -1, Set())
else tp
private def deSkolemize(tp: Type, variance: Int, seen: Set[SkolemType]): Type =
ctx.traceIndented(s"deskolemize $tp, variance = $variance, seen = $seen = ") {
- def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType, newSeen: Set[SkolemType] = seen) =
+ def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType, newSeen: Set[SkolemType] = seen) =
if (variance == 0) NoType
else deSkolemize(if (variance < 0) lo else hi, variance, newSeen)
tp match {
@@ -71,7 +71,7 @@ trait Skolemization {
tp.derivedRefinedType(parent1, tp.refinedName, refinedInfo1)
else
approx(hi = parent1)
- }
+ }
else approx()
case tp: TypeAlias =>
val alias1 = deSkolemize(tp.alias, variance * tp.variance, seen)
@@ -107,7 +107,7 @@ trait Skolemization {
deSkolemizeMap.mapOver(tp, variance, seen)
}
}
-
+
object deSkolemizeMap extends TypeMap {
private var seen: Set[SkolemType] = _
def apply(tp: Type) = deSkolemize(tp, variance, seen)
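
The variance-directed approximation performed by `deSkolemize` can be modeled on a tiny type ADT: in covariant (upper) positions a skolem is replaced by a top type, in contravariant (lower) positions by a bottom type, with variance flipping under function parameters. This is an illustrative toy with assumed names, not the compiler's algorithm (which also tracks bounds and a `seen` set):

  sealed trait Tp
  case object AnyTp extends Tp
  case object NothingTp extends Tp
  final case class Named(name: String) extends Tp
  final case class Skolem(underlying: Tp) extends Tp
  final case class Fun(param: Tp, res: Tp) extends Tp // param is contravariant

  object DeSkolemizeSketch {
    def deSkolemize(tp: Tp, variance: Int): Tp = tp match {
      case Skolem(_) => if (variance >= 0) AnyTp else NothingTp // approximate away
      case Fun(p, r) => Fun(deSkolemize(p, -variance), deSkolemize(r, variance))
      case other     => other
    }

    def main(args: Array[String]): Unit =
      // A skolem in parameter position goes to Nothing, in result position to Any:
      // prints Fun(NothingTp, AnyTp)
      println(deSkolemize(Fun(Skolem(Named("T")), Skolem(Named("U"))), 1))
  }
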
diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala
index e572f129b..3566595f2 100644
--- a/src/dotty/tools/dotc/core/SymDenotations.scala
+++ b/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -43,7 +43,7 @@ trait SymDenotations { this: Context =>
if (denot is ValidForever) true
else {
val initial = denot.initial
- if (initial ne denot)
+ if (initial ne denot)
ctx.withPhase(initial.validFor.firstPhaseId).stillValid(initial.asSymDenotation)
else try {
val owner = denot.owner.denot
@@ -79,7 +79,7 @@ object SymDenotations {
super.validFor_=(p)
}
*/
-
+
// ------ Getting and setting fields -----------------------------
private[this] var myFlags: FlagSet = adaptFlags(initFlags)
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala
index f08183b67..df18813b9 100644
--- a/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -191,8 +191,8 @@ class TypeApplications(val self: Type) extends AnyVal {
if (res.isInstantiatedLambda) res.select(tpnme.Apply) else res
}
}
-
- /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`.
+
+ /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`.
*/
def simplifyApply(implicit ctx: Context): Type = self match {
case self @ TypeRef(prefix, tpnme.Apply) if prefix.isInstantiatedLambda =>
@@ -383,7 +383,7 @@ class TypeApplications(val self: Type) extends AnyVal {
case JavaArrayType(elemtp) => elemtp
case _ => firstBaseArgInfo(defn.SeqClass)
}
-
+
def containsSkolemType(target: Type)(implicit ctx: Context): Boolean = {
def recur(tp: Type): Boolean = tp.stripTypeVar match {
case SkolemType(tp) =>
@@ -404,7 +404,7 @@ class TypeApplications(val self: Type) extends AnyVal {
case _ =>
false
}
- recur(self)
+ recur(self)
}
/** Given a type alias
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index 1687d6159..a59a64a91 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -163,7 +163,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
// Dealiasing is taken care of elsewhere.
val pre1 = tp1.prefix
val pre2 = tp2.prefix
- isSameType(pre1, pre2) ||
+ isSameType(pre1, pre2) ||
sym1.isClass &&
pre2.classSymbol.exists &&
pre2.abstractTypeMembers.isEmpty &&
@@ -176,11 +176,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
!tp1.isInstanceOf[WithFixedSym] &&
!tp2.isInstanceOf[WithFixedSym]
) ||
- compareHK(tp1, tp2, inOrder = true) ||
+ compareHK(tp1, tp2, inOrder = true) ||
compareHK(tp2, tp1, inOrder = false) ||
compareAlias(tp1.info)
case _ =>
- compareHK(tp2, tp1, inOrder = false) ||
+ compareHK(tp2, tp1, inOrder = false) ||
compareAlias(NoType)
}
}
@@ -312,7 +312,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
}
fourthTry(tp1, tp2)
}
-
+
private def thirdTry(tp1: Type, tp2: Type): Boolean = tp2 match {
case tp2: NamedType =>
thirdTryNamed(tp1, tp2)
@@ -453,7 +453,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
case JavaArrayType(elem2) => isSubType(elem1, elem2)
case _ => tp2 isRef ObjectClass
}
- compareJavaArray
+ compareJavaArray
case _ =>
false
}
@@ -518,7 +518,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
false
} else isSubType(tp1, tp2)
- /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of
+ /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of
* the normalized type of the refinement `tp2`?
* Normalization is as follows: If `tp2` contains a skolem to its refinement type,
* rebase both itself and the member info of `tp` on a freshly created skolem type.
@@ -552,16 +552,16 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
}
/** Skip refinements in `tp2` which match corresponding refinements in `tp1`.
- * "Match" means:
- * - they appear in the same order,
- * - they refine the same names,
- * - the refinement in `tp1` is an alias type, and
+ * "Match" means:
+ * - they appear in the same order,
+ * - they refine the same names,
+ * - the refinement in `tp1` is an alias type, and
* - neither refinement refers back to the refined type via a refined this.
* @return The parent type of `tp2` after skipping the matching refinements.
*/
private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match {
case tp1 @ RefinedType(parent1, name1)
- if name1 == tp2.refinedName &&
+ if name1 == tp2.refinedName &&
tp1.refinedInfo.isInstanceOf[TypeAlias] &&
!tp2.refinementRefersToThis &&
!tp1.refinementRefersToThis =>
@@ -672,7 +672,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
case _: PolyType =>
false
case tp2: MethodType =>
- relaxed && tp2.paramNames.isEmpty &&
+ relaxed && tp2.paramNames.isEmpty &&
matchesType(tp1, tp2.resultType, relaxed)
case tp2 =>
relaxed || isSameType(tp1, tp2)
@@ -1118,15 +1118,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
def copyIn(ctx: Context) = new TypeComparer(ctx)
// ----------- Diagnostics --------------------------------------------------
-
+
/** A hook for showing subtype traces. Overridden in ExplainingTypeComparer */
def traceIndented[T](str: String)(op: => T): T = op
-
+
private def traceInfo(tp1: Type, tp2: Type) =
s"${tp1.show} <:< ${tp2.show}" + {
if (ctx.settings.verbose.value || Config.verboseExplainSubtype) {
- s" ${tp1.getClass}, ${tp2.getClass}" +
- (if (frozenConstraint) " frozen" else "") +
+ s" ${tp1.getClass}, ${tp2.getClass}" +
+ (if (frozenConstraint) " frozen" else "") +
(if (ctx.mode is Mode.TypevarsMissContext) " tvars-miss-ctx" else "")
}
else ""
diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala
index e6a81248c..2b6ea49e8 100644
--- a/src/dotty/tools/dotc/core/TypeOps.scala
+++ b/src/dotty/tools/dotc/core/TypeOps.scala
@@ -86,7 +86,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
class SimplifyMap extends TypeMap {
def apply(tp: Type) = simplify(tp, this)
}
-
+
/** Approximate union type by intersection of its dominators.
* See Type#approximateUnion for an explanation.
*/
@@ -330,7 +330,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
}
parentRefs
}
-
+
/** An argument bounds violation is a triple consisting of
* - the argument tree
* - a string "upper" or "lower" indicating which bound is violated
diff --git a/src/dotty/tools/dotc/core/TyperState.scala b/src/dotty/tools/dotc/core/TyperState.scala
index 1079af510..91cda1dd8 100644
--- a/src/dotty/tools/dotc/core/TyperState.scala
+++ b/src/dotty/tools/dotc/core/TyperState.scala
@@ -17,7 +17,7 @@ class TyperState(r: Reporter) extends DotClass with Showable {
def reporter = r
/** The current constraint set */
- def constraint: Constraint =
+ def constraint: Constraint =
new OrderingConstraint(SimpleMap.Empty, SimpleMap.Empty, SimpleMap.Empty)
def constraint_=(c: Constraint): Unit = {}
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index 31567fee0..802a4e406 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -471,7 +471,7 @@ object Types {
go(bounds.hi)
case _ =>
go(next)
- }
+ }
}
def goAnd(l: Type, r: Type) = go(l) & (go(r), pre)
def goOr(l: Type, r: Type) = go(l) | (go(r), pre)
@@ -603,9 +603,9 @@ object Types {
* and matching result types after renaming corresponding parameter types
* if the method types are dependent.
* - Or both types are =:=-equivalent
- * - Or phase.erasedTypes is false, and neither type takes
+ * - Or phase.erasedTypes is false, and neither type takes
* term or type parameters.
- *
+ *
* (*) when matching with a Java method, we also regard Any and Object as equivalent
* parameter types.
*/
@@ -777,9 +777,9 @@ object Types {
* to just U. Does not perform the reduction if the resulting type would contain
* a reference to the "this" of the current refined type. But does follow
* aliases in order to avoid such references. Example:
- *
+ *
* Lambda$I { type $hk$Arg0 = String, type Apply = Lambda$I{...}.$hk$Arg0 } # Apply
- *
+ *
* Here, the refinement for `Apply` has a refined this node, yet dereferencing once more
* yields `String` as the result of lookupRefined.
*/
@@ -788,7 +788,7 @@ object Types {
case pre: RefinedType =>
if (pre.refinedName ne name) loop(pre.parent)
else pre.refinedInfo match {
- case TypeAlias(tp) =>
+ case TypeAlias(tp) =>
if (!pre.refinementRefersToThis) tp
else tp match {
case TypeRef(SkolemType(`pre`), alias) => lookupRefined(alias)
@@ -1158,7 +1158,7 @@ object Types {
private[this] var lastDenotation: Denotation = _
private[this] var lastSymbol: Symbol = _
private[this] var checkedPeriod = Nowhere
-
+
// Invariants:
// (1) checkedPeriod != Nowhere => lastDenotation != null
// (2) lastDenotation != null => lastSymbol != null
@@ -1286,7 +1286,7 @@ object Types {
checkSymAssign(denot.symbol)
// additional checks that intercept `denot` can be added here
-
+
lastDenotation = denot
lastSymbol = denot.symbol
}
@@ -1737,10 +1737,10 @@ object Types {
extends CachedProxyType with BindingType with ValueType {
val refinedInfo: Type
-
+
private var refinementRefersToThisCache: Boolean = _
private var refinementRefersToThisKnown: Boolean = false
-
+
def refinementRefersToThis(implicit ctx: Context): Boolean = {
if (!refinementRefersToThisKnown) {
refinementRefersToThisCache = refinedInfo.containsSkolemType(this)
@@ -1778,7 +1778,7 @@ object Types {
&& !parent.isLambda)
derivedRefinedType(parent.EtaExpand, refinedName, refinedInfo)
else
- if (false) RefinedType(parent, refinedName, refinedInfo)
+ if (false) RefinedType(parent, refinedName, refinedInfo)
else RefinedType(parent, refinedName, rt => refinedInfo.substSkolem(this, SkolemType(rt)))
}
@@ -1929,11 +1929,11 @@ object Types {
def isJava = false
def isImplicit = false
-
+
private val resType = resultTypeExp(this)
assert(resType.exists)
-
- override def resultType(implicit ctx: Context): Type =
+
+ override def resultType(implicit ctx: Context): Type =
if (dependencyStatus == FalseDeps) { // dealias all false dependencies
val dealiasMap = new TypeMap {
def apply(tp: Type) = tp match {
@@ -1949,28 +1949,28 @@ object Types {
else resType
var myDependencyStatus: DependencyStatus = Unknown
-
+
private def combine(x: DependencyStatus, y: DependencyStatus): DependencyStatus = {
val status = (x & StatusMask) max (y & StatusMask)
val provisional = (x | y) & Provisional
(if (status == TrueDeps) status else status | provisional).toByte
}
-
+
/** The dependency status of this method. Some examples:
- *
+ *
* class C extends { type S; type T = String }
* def f(x: C)(y: Boolean) // dependencyStatus = NoDeps
* def f(x: C)(y: x.S) // dependencyStatus = TrueDeps
- * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e.
+ * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e.
* // dependency can be eliminated by dealiasing.
*/
private def dependencyStatus(implicit ctx: Context): DependencyStatus = {
if (myDependencyStatus != Unknown) myDependencyStatus
else {
val isDepAcc = new TypeAccumulator[DependencyStatus] {
- def apply(x: DependencyStatus, tp: Type) =
+ def apply(x: DependencyStatus, tp: Type) =
if (x == TrueDeps) x
- else
+ else
tp match {
case MethodParam(`thisMethodType`, _) => TrueDeps
case tp @ TypeRef(MethodParam(`thisMethodType`, _), name) =>
@@ -1992,7 +1992,7 @@ object Types {
* which cannot be eliminated by de-aliasing?
*/
def isDependent(implicit ctx: Context): Boolean = dependencyStatus == TrueDeps
-
+
protected def computeSignature(implicit ctx: Context): Signature =
resultSignature.prepend(paramTypes, isJava)
@@ -2071,7 +2071,7 @@ object Types {
object MethodType extends MethodTypeCompanion {
def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
unique(new CachedMethodType(paramNames, paramTypes)(resultTypeExp))
-
+
private type DependencyStatus = Byte
private final val Unknown: DependencyStatus = 0 // not yet computed
private final val NoDeps: DependencyStatus = 1 // no dependent parameters found
@@ -2116,7 +2116,7 @@ object Types {
val paramBounds = paramBoundsExp(this)
val resType = resultTypeExp(this)
-
+
override def resultType(implicit ctx: Context) = resType
protected def computeSignature(implicit ctx: Context) = resultSignature
@@ -2234,7 +2234,7 @@ object Types {
type BT = Type
override def underlying(implicit ctx: Context) = binder
def copyBoundType(bt: BT) = SkolemType(bt)
-
+
// need to customize hashCode and equals to prevent infinite recursion for
// refinements that refer to the refinement type via this
override def computeHash = addDelta(binder.identityHash, 41)
@@ -2263,7 +2263,7 @@ object Types {
* @param owningTree The function part of the TypeApply tree that introduces
* the type variable.
* @param owner The current owner of the context where the variable was created.
- *
+ *
* `owningTree` and `owner` are used to determine whether a type-variable can be instantiated
* at some given point. See `Inferencing#interpolateUndetVars`.
*/
@@ -2599,7 +2599,7 @@ object Types {
if ((annot eq this.annot) && (tpe eq this.tpe)) this
else AnnotatedType(annot, tpe)
- override def stripTypeVar(implicit ctx: Context): Type =
+ override def stripTypeVar(implicit ctx: Context): Type =
derivedAnnotatedType(annot, tpe.stripTypeVar)
override def stripAnnots(implicit ctx: Context): Type = tpe.stripAnnots
}
@@ -2871,7 +2871,7 @@ object Types {
protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations
protected var variance = 1
-
+
protected def applyToPrefix(x: T, tp: NamedType) = {
val saved = variance
variance = 0
@@ -2879,7 +2879,7 @@ object Types {
variance = saved
result
}
-
+
def foldOver(x: T, tp: Type): T = tp match {
case tp: TypeRef =>
if (stopAtStatic && tp.symbol.isStatic) x
@@ -3073,7 +3073,7 @@ object Types {
// ----- Debug ---------------------------------------------------------
var debugTrace = false
-
+
val watchList = List[String](
) map (_.toTypeName)
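
The `combine` helper shown in the MethodType hunk above merges two DependencyStatus bytes: the low bits rank NoDeps < FalseDeps < TrueDeps and combine with `max`, while the Provisional bit is sticky unless the result is already TrueDeps. A standalone sketch follows; Unknown and NoDeps match the constants listed in the diff, the remaining numeric values are assumptions of this sketch:

  object DepStatusSketch {
    type DependencyStatus = Byte
    final val Unknown: DependencyStatus     = 0 // not yet computed
    final val NoDeps: DependencyStatus      = 1 // no dependent parameters found
    final val FalseDeps: DependencyStatus   = 2 // deps eliminable by dealiasing (assumed value)
    final val TrueDeps: DependencyStatus    = 3 // real dependencies (assumed value)
    final val StatusMask: DependencyStatus  = 3 // (assumed value)
    final val Provisional: DependencyStatus = 4 // status may still change (assumed value)

    def combine(x: DependencyStatus, y: DependencyStatus): DependencyStatus = {
      val status = (x & StatusMask) max (y & StatusMask)
      val provisional = (x | y) & Provisional
      (if (status == TrueDeps) status else status | provisional).toByte
    }

    def main(args: Array[String]): Unit = {
      println(combine(NoDeps, (FalseDeps | Provisional).toByte)) // 6: FalseDeps, still provisional
      println(combine(TrueDeps, (NoDeps | Provisional).toByte))  // 3: TrueDeps is final
    }
  }
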
diff --git a/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala b/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala
index 52ea7ba38..935c94055 100644
--- a/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala
+++ b/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala
@@ -139,7 +139,7 @@ class ClassfileParser(
if (companionClassMethod.exists) companionClassMethod.entered
val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, moduleRoot, classRoot)
if (companionModuleMethod.exists) companionModuleMethod.entered
-
+
setClassInfo(classRoot, classInfo)
setClassInfo(moduleRoot, staticInfo)
}
@@ -590,7 +590,7 @@ class ClassfileParser(
val targs = tparams.map(_.typeRef)
val paramNames = attrs.map(_.name.asTermName)
val paramTypes = attrs.map(_.info.resultType)
-
+
def addConstr(ptypes: List[Type]) = {
val mtype = MethodType(paramNames, ptypes, classRoot.typeRef.appliedTo(targs))
val constrType = if (tparams.isEmpty) mtype else TempPolyType(tparams, mtype)
@@ -606,15 +606,15 @@ class ClassfileParser(
addDefaultGetter(attr, i)
}
}
-
+
addConstr(paramTypes)
if (paramTypes.nonEmpty)
paramTypes.last match {
- case defn.ArrayType(elemtp) =>
- addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
+ case defn.ArrayType(elemtp) =>
+ addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
case _ =>
}
-
+
}
}
diff --git a/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala
index 84a9a1744..64be68975 100644
--- a/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala
@@ -23,14 +23,14 @@ class DottyUnpickler(bytes: Array[Byte]) {
private val unpickler = new TastyUnpickler(bytes)
private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler).get
-
+
/** Enter all toplevel classes and objects into their scopes
* @param roots a set of SymDenotations that should be overwritten by unpickling
*/
- def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
+ def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
treeUnpickler.enterTopLevel(roots)
-
- /** The unpickled trees
+
+ /** The unpickled trees
* @param readPositions if true, trees get decorated with position information.
*/
def body(readPositions: Boolean = false)(implicit ctx: Context): List[Tree] = {
@@ -44,7 +44,7 @@ class DottyUnpickler(bytes: Array[Byte]) {
def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
new TreeUnpickler(reader, tastyName)
}
-
+
private class PositionsSectionUnpickler extends SectionUnpickler[(Position, AddrToPosition)]("Positions") {
def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
new PositionUnpickler(reader).unpickle()
diff --git a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala
index a60767fe6..2a6239c5a 100644
--- a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala
@@ -24,14 +24,14 @@ class NameBuffer extends TastyBuffer(100000) {
ref
}
def nameIndex(name: Name): NameRef = {
- val tname =
+ val tname =
if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed))
else Simple(name.toTermName)
nameIndex(tname)
}
-
+
def nameIndex(str: String): NameRef = nameIndex(str.toTermName)
-
+
def fullNameIndex(name: Name): NameRef = {
val pos = name.lastIndexOf('.')
if (pos > 0)
@@ -39,7 +39,7 @@ class NameBuffer extends TastyBuffer(100000) {
else
nameIndex(name)
}
-
+
private def withLength(op: => Unit): Unit = {
val lengthAddr = currentAddr
writeByte(0)
@@ -48,12 +48,12 @@ class NameBuffer extends TastyBuffer(100000) {
assert(length < 128)
putNat(lengthAddr, length, 1)
}
-
+
def writeNameRef(ref: NameRef) = writeNat(ref.index)
-
+
def pickleName(name: TastyName): Unit = name match {
- case Simple(name) =>
- val bytes =
+ case Simple(name) =>
+ val bytes =
if (name.length == 0) new Array[Byte](0)
else Codec.toUTF8(chrs, name.start, name.length)
writeByte(UTF8)
@@ -62,7 +62,7 @@ class NameBuffer extends TastyBuffer(100000) {
case Qualified(qualified, selector) =>
writeByte(QUALIFIED)
withLength { writeNameRef(qualified); writeNameRef(selector) }
- case Signed(original, params, result) =>
+ case Signed(original, params, result) =>
writeByte(SIGNED)
withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) }
case Expanded(prefix, original) =>
@@ -81,7 +81,7 @@ class NameBuffer extends TastyBuffer(100000) {
writeByte(SHADOWED)
withLength { writeNameRef(original) }
}
-
+
override def assemble(): Unit = {
var i = 0
for ((name, ref) <- nameRefs) {
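
`withLength` above reserves a single length byte, runs the writer, and then patches the actual byte count back in (hence the `length < 128` assertion: it must fit one base-128 digit). A minimal sketch of the pattern, using an ArrayBuffer in place of the real TastyBuffer:

  import scala.collection.mutable.ArrayBuffer

  object WithLengthSketch {
    val buf = ArrayBuffer.empty[Int]

    def withLength(op: => Unit): Unit = {
      val lengthAddr = buf.length
      buf += 0                       // placeholder for the length
      op
      val length = buf.length - lengthAddr - 1
      assert(length < 128)           // must fit a single base-128 digit
      buf(lengthAddr) = length       // patch the real length in
    }

    def main(args: Array[String]): Unit = {
      withLength { buf += 10; buf += 20; buf += 30 }
      println(buf)                   // ArrayBuffer(3, 10, 20, 30)
    }
  }
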
diff --git a/src/dotty/tools/dotc/core/pickling/PositionPickler.scala b/src/dotty/tools/dotc/core/pickling/PositionPickler.scala
index d791e37d2..e8a0b3d01 100644
--- a/src/dotty/tools/dotc/core/pickling/PositionPickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/PositionPickler.scala
@@ -13,13 +13,13 @@ import TastyBuffer._
import util.Positions._
object PositionPickler {
-
+
trait DeferredPosition {
var parentPos: Position = NoPosition
}
- def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit =
- if (parentPos.exists)
+ def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit =
+ if (parentPos.exists)
x match {
case x: Tree @unchecked =>
op(x, parentPos)
@@ -33,7 +33,7 @@ object PositionPickler {
case xs: TraversableOnce[_] =>
xs.foreach(traverse(_, parentPos, op))
case _ =>
- }
+ }
}
import PositionPickler._
@@ -41,18 +41,18 @@ class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) {
val buf = new TastyBuffer(100000)
pickler.newSection("Positions", buf)
import buf._
-
+
def picklePositions(roots: List[Tree], totalRange: Position)(implicit ctx: Context) = {
var lastIndex = 0
def record(tree: Tree, parentPos: Position): Unit =
if (tree.pos.exists) {
def msg = s"failure to pickle $tree at ${tree.pos}, parent = $parentPos"
- val endPos = tree.pos.end min parentPos.end
+ val endPos = tree.pos.end min parentPos.end
// end positions can be larger than their parents
// e.g. in the case of synthetic empty ranges, which are placed at the next token after
// the current construct.
val endDelta = endPos - parentPos.end
- val startPos =
+ val startPos =
if (endDelta == 0) tree.pos.start max parentPos.start else tree.pos.start min endPos
// Since end positions are corrected above, start positions have to follow suit.
val startDelta = startPos - parentPos.start
@@ -68,7 +68,7 @@ class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) {
assert(startDelta >= 0, msg)
}
}
-
+
buf.writeNat(totalRange.end)
traverse(roots, totalRange, record)
}
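
The correction logic in `record` exists because, as the comment notes, synthetic empty ranges may end after their parent: the end is clamped to the parent's end first, and the start is then forced to stay consistent with the corrected end. The clamping rule in isolation, mirrored from the hunk above:

  object PosDeltaDemo {
    // Clamp the child's end to the parent, then keep the start consistent.
    def deltas(start: Int, end: Int, pStart: Int, pEnd: Int): (Int, Int) = {
      val endPos   = end min pEnd
      val endDelta = endPos - pEnd
      val startPos = if (endDelta == 0) start max pStart else start min endPos
      (startPos - pStart, endDelta)
    }

    def main(args: Array[String]): Unit = {
      println(deltas(3, 9, 0, 10))   // (3,-1): child strictly inside its parent
      println(deltas(5, 12, 0, 10))  // (5,0): overshooting end clamped to the parent's end
    }
  }
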
diff --git a/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala b/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala
index 782e871c0..cfcc4a835 100644
--- a/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala
@@ -24,11 +24,11 @@ class PositionUnpickler(reader: TastyReader) {
while (!isAtEnd) {
val delta1 = readDelta()
val delta2 = readDelta()
- val (startDelta, endDelta, indexDelta) =
+ val (startDelta, endDelta, indexDelta) =
if (delta2 <= 0) (delta1, -delta2, readDelta())
else if (delta1 < 0) (0, -delta1, delta2)
else (delta1, 0, delta2)
- positions(curIndex) = Position(startDelta, endDelta, startDelta)
+ positions(curIndex) = Position(startDelta, endDelta, startDelta)
// make non-synthetic position; will be made synthetic by normalization.
curIndex += indexDelta
}
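
The decoder above reads either two or three deltas per position: a non-positive second delta announces that a third (index) delta follows, while the two-number forms cover positions whose start or end delta is zero. A standalone mirror of the rule with worked inputs:

  object PosDecodeDemo {
    def decode(in: Iterator[Int]): (Int, Int, Int) = {
      val delta1 = in.next(); val delta2 = in.next()
      if (delta2 <= 0) (delta1, -delta2, in.next())
      else if (delta1 < 0) (0, -delta1, delta2)
      else (delta1, 0, delta2)
    }

    def main(args: Array[String]): Unit = {
      println(decode(Iterator(4, -2, 1)))  // (4,2,1): full three-delta form
      println(decode(Iterator(-3, 5)))     // (0,3,5): zero start delta elided
      println(decode(Iterator(4, 5)))      // (4,0,5): zero end delta elided
    }
  }
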
diff --git a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala
index 9197a2acc..99ae331d0 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala
@@ -6,19 +6,19 @@ package pickling
import util.Util.dble
object TastyBuffer {
-
+
/** The number of digits of the natural number `nat`, written in base 128 format. */
- def natSize(nat: Int): Int =
+ def natSize(nat: Int): Int =
if (nat < 128) 1 else natSize(nat >>> 7) + 1
/** An address pointing to an index in a Tasty buffer's byte array */
case class Addr(val index: Int) extends AnyVal {
def -(delta: Int): Addr = Addr(this.index - delta)
def +(delta: Int): Addr = Addr(this.index + delta)
-
+
def relativeTo(base: Addr): Addr = this - base.index - AddrWidth
}
-
+
val NoAddr = Addr(-1)
/** The maximal number of address bytes.
@@ -33,13 +33,13 @@ import TastyBuffer._
* and that supports reading and patching addresses represented as natural numbers.
*/
class TastyBuffer(initialSize: Int) {
-
+
/** The current byte array, will be expanded as needed */
var bytes = new Array[Byte](initialSize)
-
+
/** The number of bytes written */
var length = 0
-
+
// -- Output routines --------------------------------------------
/** Write a byte of data. */
@@ -48,7 +48,7 @@ class TastyBuffer(initialSize: Int) {
bytes(length) = b.toByte
length += 1
}
-
+
/** Write the first `n` bytes of `data`. */
def writeBytes(data: Array[Byte], n: Int): Unit = {
while (bytes.length < length + n) bytes = dble(bytes)
@@ -61,13 +61,13 @@ class TastyBuffer(initialSize: Int) {
*/
def writeNat(x: Int): Unit =
writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
-
+
   /** Write an integer number in 2's complement big endian format, base 128.
    * Only the last digit has bit 0x80 set.
*/
- def writeInt(x: Int): Unit =
+ def writeInt(x: Int): Unit =
writeLongInt(x)
-
+
/**
* Like writeNat, but for longs. Note that the
* binary representation of LongNat is identical to Nat
@@ -84,7 +84,7 @@ class TastyBuffer(initialSize: Int) {
if (y != 0L) writePrefix(y)
writeByte(((x & 0x7f) | 0x80).toInt)
}
-
+
/** Like writeInt, but for longs */
def writeLongInt(x: Long): Unit = {
def writePrefix(x: Long): Unit = {
@@ -94,22 +94,22 @@ class TastyBuffer(initialSize: Int) {
}
val y = x >> 7
if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
- writeByte(((x & 0x7f) | 0x80).toInt)
+ writeByte(((x & 0x7f) | 0x80).toInt)
}
-
+
/** Write an uncompressed Long stored in 8 bytes in big endian format */
def writeUncompressedLong(x: Long): Unit = {
var y = x
val bytes = new Array[Byte](8)
for (i <- 7 to 0 by -1) {
bytes(i) = (y & 0xff).toByte
- y = y >>> 8
+ y = y >>> 8
}
writeBytes(bytes, 8)
}
// -- Address handling --------------------------------------------
-
+
/** Write natural number `x` right-adjusted in a field of `width` bytes
* starting with address `at`.
*/
@@ -125,10 +125,10 @@ class TastyBuffer(initialSize: Int) {
}
assert(y == 0, s"number $x too large to fit in $width bytes")
}
-
+
/** The byte at given address */
def getByte(at: Addr): Int = bytes(at.index)
-
+
/** The natural number at address `at` */
def getNat(at: Addr): Int = getLongNat(at).toInt
@@ -148,8 +148,8 @@ class TastyBuffer(initialSize: Int) {
/** The address (represented as a natural number) at address `at` */
def getAddr(at: Addr) = Addr(getNat(at))
- /** The smallest address equal to or following `at` which points to a non-zero byte */
- final def skipZeroes(at: Addr): Addr =
+ /** The smallest address equal to or following `at` which points to a non-zero byte */
+ final def skipZeroes(at: Addr): Addr =
if (getByte(at) != 0) at else skipZeroes(at + 1)
/** The address after the natural number found at address `at`. */
@@ -160,21 +160,21 @@ class TastyBuffer(initialSize: Int) {
/** The address referring to the end of data written so far */
def currentAddr: Addr = Addr(length)
-
+
/** Reserve `AddrWidth` bytes to write an address into */
def reserveAddr(): Addr = {
val result = currentAddr
length += AddrWidth
result
}
-
+
/** Fill reserved space at address `at` with address `target` */
- def fillAddr(at: Addr, target: Addr) =
+ def fillAddr(at: Addr, target: Addr) =
putNat(at, target.index, AddrWidth)
-
+
/** Write address without leading zeroes */
def writeAddr(addr: Addr): Unit = writeNat(addr.index)
-
+
// -- Finalization --------------------------------------------
/** Hook to be overridden in subclasses.
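
Taken together with the reader loop in `TastyReader` further down, the writers above fix the wire format of numbers: payloads travel in 7-bit big-endian digits, and the digit carrying bit 0x80 terminates the number. A standalone round-trip of the Nat encoding, mirroring `writeLongNat` and `readLongNat`:

  import scala.collection.mutable

  object NatCodecDemo {
    // 7-bit big-endian digits; bit 0x80 set on the last digit only.
    def writeNat(x: Long, out: mutable.ArrayBuffer[Byte]): Unit = {
      def prefix(y: Long): Unit = {
        val z = y >>> 7
        if (z != 0L) prefix(z)
        out += (y & 0x7f).toByte
      }
      val y = x >>> 7
      if (y != 0L) prefix(y)
      out += ((x & 0x7f) | 0x80).toByte
    }

    // Accumulate digits until the stop bit appears.
    def readNat(in: Iterator[Byte]): Long = {
      var x = 0L
      var b = 0L
      do {
        b = in.next()
        x = (x << 7) | (b & 0x7f)
      } while ((b & 0x80) == 0)
      x
    }

    def main(args: Array[String]): Unit =
      for (n <- Seq(0L, 127L, 128L, 100000L)) {
        val buf = mutable.ArrayBuffer[Byte]()
        writeNat(n, buf)
        println(s"$n -> ${buf.map(b => f"${b & 0xff}%02x").mkString(" ")} -> ${readNat(buf.iterator)}")
      }
  }
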
diff --git a/src/dotty/tools/dotc/core/pickling/TastyName.scala b/src/dotty/tools/dotc/core/pickling/TastyName.scala
index e8f30a234..e47ff9fc4 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyName.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyName.scala
@@ -9,22 +9,22 @@ import collection.mutable
abstract class TastyName
object TastyName {
-
+
case class NameRef(val index: Int) extends AnyVal
-
+
case class Simple(name: TermName) extends TastyName
case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName
- case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
+ case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
case class Expanded(prefix: NameRef, original: NameRef) extends TastyName
case class ModuleClass(module: NameRef) extends TastyName
case class SuperAccessor(accessed: NameRef) extends TastyName
case class DefaultGetter(method: NameRef, num: Int) extends TastyName
case class Shadowed(original: NameRef) extends TastyName
-
+
class Table extends (NameRef => TastyName) {
private val names = new mutable.ArrayBuffer[TastyName]
def add(name: TastyName) = names += name
def apply(ref: NameRef) = names(ref.index)
def contents: Iterable[TastyName] = names
}
-}
+}
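
Composite names refer to their parts through `NameRef` indices into the table, so a shared prefix is stored once and referenced everywhere it occurs. A minimal standalone model of the resolution (the name strings are invented for the example):

  object NameTableDemo {
    final case class NameRef(index: Int)
    sealed trait TastyName
    final case class Simple(name: String) extends TastyName
    final case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName

    val table = Vector[TastyName](
      Simple("scala"),                    // 0
      Simple("collection"),               // 1
      Qualified(NameRef(0), NameRef(1)),  // 2 = scala.collection
      Simple("immutable"),                // 3
      Qualified(NameRef(2), NameRef(3)))  // 4 = scala.collection.immutable

    def show(ref: NameRef): String = table(ref.index) match {
      case Simple(n)            => n
      case Qualified(qual, sel) => show(qual) + "." + show(sel)
    }

    def main(args: Array[String]): Unit =
      println(show(NameRef(4)))           // scala.collection.immutable
  }
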
diff --git a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala
index f998cf377..6bd6f1c44 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala
@@ -9,9 +9,9 @@ import TastyBuffer._
import java.util.UUID
class TastyPickler {
-
+
private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)]
-
+
private val headerBuffer = {
val buf = new TastyBuffer(24)
for (ch <- header) buf.writeByte(ch.toByte)
@@ -24,17 +24,17 @@ class TastyPickler {
}
val nameBuffer = new NameBuffer
-
- def newSection(name: String, buf: TastyBuffer) =
+
+ def newSection(name: String, buf: TastyBuffer) =
sections += ((nameBuffer.nameIndex(name), buf))
-
+
def assembleParts(): Array[Byte] = {
def lengthWithLength(buf: TastyBuffer) = {
buf.assemble()
buf.length + natSize(buf.length)
}
- val totalSize =
- headerBuffer.length +
+ val totalSize =
+ headerBuffer.length +
lengthWithLength(nameBuffer) + {
for ((nameRef, buf) <- sections) yield
natSize(nameRef.index) + lengthWithLength(buf)
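
Every part of the assembled file is length-prefixed, so a buffer's contribution to `totalSize` is its payload plus the size of its own length field, which is what `lengthWithLength` computes via `natSize`. A quick standalone check of that arithmetic:

  object SizeDemo {
    // natSize as defined in TastyBuffer: number of base-128 digits of `nat`.
    def natSize(nat: Int): Int = if (nat < 128) 1 else natSize(nat >>> 7) + 1

    def lengthWithLength(payload: Int): Int = payload + natSize(payload)

    def main(args: Array[String]): Unit = {
      println(lengthWithLength(100))  // 101: the length fits in one digit
      println(lengthWithLength(200))  // 202: 200 needs a two-digit length field
    }
  }
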
diff --git a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala
index 37a1e3b40..9d07fc5da 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala
@@ -12,14 +12,14 @@ import util.Positions.{Position, offsetToInt}
import collection.mutable
class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
-
+
val unpickler = new TastyUnpickler(bytes)
import unpickler.{tastyName, unpickle}
-
+
def nameToString(name: TastyName): String = name match {
case Simple(name) => name.toString
case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name)
- case Signed(original, params, result) =>
+ case Signed(original, params, result) =>
i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}"
case Expanded(prefix, original) => s"$prefix${nme.EXPAND_SEPARATOR}$original"
case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS"
@@ -27,13 +27,13 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num
case Shadowed(original) => nameRefToString(original) + "/SHADOWED"
}
-
+
def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref))
-
- def printNames() =
+
+ def printNames() =
for ((name, idx) <- tastyName.contents.zipWithIndex)
println(f"$idx%4d: " + nameToString(name))
-
+
def printContents(): Unit = {
println("Names:")
printNames()
@@ -41,7 +41,7 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
unpickle(new TreeSectionUnpickler)
unpickle(new PositionSectionUnpickler)
}
-
+
class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") {
import PickleFormat._
def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
@@ -66,12 +66,12 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
tag match {
case RENAMED =>
printName(); printName()
- case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
printName(); printTrees()
case REFINEDtype =>
printTree(); printName(); printTrees()
case RETURN =>
- printNat(); printTrees()
+ printNat(); printTrees()
case METHODtype | POLYtype =>
printTree()
until(end) { printName(); printTree() }
@@ -85,16 +85,16 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
goto(end)
}
}
- else if (tag >= firstNatASTTreeTag) {
+ else if (tag >= firstNatASTTreeTag) {
tag match {
case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName()
- case _ => printNat()
+ case _ => printNat()
}
printTree()
}
- else if (tag >= firstASTTreeTag)
+ else if (tag >= firstASTTreeTag)
printTree()
- else if (tag >= firstNatTreeTag)
+ else if (tag >= firstNatTreeTag)
tag match {
case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName()
case _ => printNat()
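
The printer needs no per-tag layout table: the tag's numeric range alone determines an entry's shape, checked from the largest threshold down, exactly as in the cascade above. A sketch of the dispatch with illustrative thresholds (the real constants live in PickleFormat and are not reproduced here):

  object TagDispatchDemo {
    // Illustrative values only; see PickleFormat for the real constants.
    val firstNatTreeTag    = 32
    val firstASTTreeTag    = 64
    val firstNatASTTreeTag = 96
    val firstLengthTreeTag = 128

    def shape(tag: Int): String =
      if (tag >= firstLengthTreeTag) "end address, then length-prefixed contents"
      else if (tag >= firstNatASTTreeTag) "one Nat (or name), then one subtree"
      else if (tag >= firstASTTreeTag) "one subtree"
      else if (tag >= firstNatTreeTag) "one Nat (or name)"
      else "no operands"

    def main(args: Array[String]): Unit =
      Seq(10, 40, 70, 100, 200).foreach(t => println(s"tag $t: ${shape(t)}"))
  }
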
diff --git a/src/dotty/tools/dotc/core/pickling/TastyReader.scala b/src/dotty/tools/dotc/core/pickling/TastyReader.scala
index 0385e9adb..35724e557 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyReader.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyReader.scala
@@ -10,49 +10,49 @@ import collection.mutable
/** A reader over a byte array containing data in TASTY format. It supports reading
 * bytes, natural numbers, and addresses represented as natural numbers.
- *
+ *
* @param bytes The array containing data
 * @param start The position from which to read
* @param end The position one greater than the last byte to be read
- * @param base The index referenced by the logical zero address Addr(0)
+ * @param base The index referenced by the logical zero address Addr(0)
*/
class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) {
-
+
def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length)
-
+
private var bp: Int = start
-
+
def addr(idx: Int) = Addr(idx - base)
def index(addr: Addr) = addr.index + base
-
+
   /** The address of the first byte to read (or already read) */
def startAddr: Addr = addr(start)
-
+
/** The address of the next byte to read */
def currentAddr: Addr = addr(bp)
-
+
   /** The address one greater than the last byte to read */
def endAddr: Addr = addr(end)
-
+
/** Have all bytes been read? */
def isAtEnd: Boolean = bp == end
-
+
/** A new reader over the same array with the same address base, but with
* specified start and end positions
*/
- def subReader(start: Addr, end: Addr): TastyReader =
+ def subReader(start: Addr, end: Addr): TastyReader =
new TastyReader(bytes, index(start), index(end), base)
-
+
/** Read a byte of data. */
def readByte(): Int = {
val result = bytes(bp) & 0xff
bp += 1
result
}
-
- /** Returns the next byte of data as a natural number without advancing the read position */
+
+ /** Returns the next byte of data as a natural number without advancing the read position */
def nextByte: Int = bytes(bp) & 0xff
-
+
   /** Read the next `n` bytes. */
def readBytes(n: Int): Array[Byte] = {
val result = new Array[Byte](n)
@@ -65,12 +65,12 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
   * Only the last digit has bit 0x80 set.
*/
def readNat(): Int = readLongNat.toInt
-
+
/** Read an integer number in 2's complement big endian format, base 128.
    * Only the last digit has bit 0x80 set.
*/
def readInt(): Int = readLongInt.toInt
-
+
/** Read a natural number fitting in a Long in big endian format, base 128.
   * Only the last digit has bit 0x80 set.
*/
@@ -84,7 +84,7 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
} while ((b & 0x80) == 0)
x
}
-
+
/** Read a long integer number in 2's complement big endian format, base 128. */
def readLongInt(): Long = {
var b = bytes(bp)
@@ -95,9 +95,9 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
x = (x << 7) | (b & 0x7f)
bp += 1
}
- x
+ x
}
-
+
/** Read an uncompressed Long stored in 8 bytes in big endian format */
def readUncompressedLong(): Long = {
     var x = 0L // must be a Long: eight bytes are shifted in below
@@ -105,22 +105,22 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
x = (x << 8) | (readByte() & 0xff)
x
}
-
+
/** Read a natural number and return as a NameRef */
def readNameRef() = NameRef(readNat())
-
- /** Read a natural number and return as an address */
+
+ /** Read a natural number and return as an address */
def readAddr() = Addr(readNat())
-
+
/** Read a length number and return the absolute end address implied by it,
* given as <address following length field> + <length-value-read>.
*/
def readEnd(): Addr = addr(readNat() + bp)
-
+
/** Set read position to the one pointed to by `addr` */
- def goto(addr: Addr): Unit =
+ def goto(addr: Addr): Unit =
bp = index(addr)
-
+
/** Perform `op` until `end` address is reached and collect results in a list. */
def until[T](end: Addr)(op: => T): List[T] = {
val buf = new mutable.ListBuffer[T]
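
`readLongInt` differs from `readLongNat` only in how the first digit is treated: bit 6 of that digit carries the payload's sign, matching the writer's stop condition `y != 0L - ((x >> 6) & 1)` seen in TastyBuffer. A standalone round-trip (the reader's sign extension of the first digit is elided in the hunk above, so the line marked below is an assumption consistent with the writer):

  import scala.collection.mutable

  object IntCodecDemo {
    // Mirrors writeLongInt: stop emitting prefix digits once the remainder is
    // just the sign extension of bit 6 of the next digit.
    def writeInt(x: Long, out: mutable.ArrayBuffer[Byte]): Unit = {
      def prefix(y: Long): Unit = {
        val z = y >> 7
        if (z != 0L - ((y >> 6) & 1)) prefix(z)
        out += (y & 0x7f).toByte
      }
      val y = x >> 7
      if (y != 0L - ((x >> 6) & 1)) prefix(y)
      out += ((x & 0x7f) | 0x80).toByte
    }

    def readInt(in: Iterator[Byte]): Long = {
      var b = in.next()
      var x: Long = (b << 1).toByte >> 1  // assumed: sign-extend bit 6 of the first digit
      while ((b & 0x80) == 0) {
        b = in.next()
        x = (x << 7) | (b & 0x7f)
      }
      x
    }

    def main(args: Array[String]): Unit =
      for (n <- Seq(-3L, 42L, -300L, 100000L)) {
        val buf = mutable.ArrayBuffer[Byte]()
        writeInt(n, buf)
        println(s"$n -> ${buf.map(b => f"${b & 0xff}%02x").mkString(" ")} -> ${readInt(buf.iterator)}")
      }
  }
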
diff --git a/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala
index b5e978afa..5fbb85768 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala
@@ -7,9 +7,9 @@ import PickleFormat._
import Names.{Name, termName}
import java.util.UUID
-object TastyUnpickler {
+object TastyUnpickler {
class UnpickleException(msg: String) extends Exception(msg)
-
+
abstract class SectionUnpickler[R](val name: String) {
def unpickle(reader: TastyReader, tastyName: TastyName.Table): R
}
@@ -19,28 +19,28 @@ import TastyUnpickler._
class TastyUnpickler(reader: TastyReader) {
import reader._
-
+
def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
-
+
private val sectionReader = new mutable.HashMap[String, TastyReader]
val tastyName = new TastyName.Table
-
- def check(cond: Boolean, msg: => String) =
+
+ def check(cond: Boolean, msg: => String) =
if (!cond) throw new UnpickleException(msg)
-
+
def readString(): String = {
val TastyName.Simple(name) = tastyName(readNameRef())
name.toString
}
-
+
def readName(): TastyName = {
import TastyName._
- val tag = readByte()
+ val tag = readByte()
val length = readNat()
val start = currentAddr
val end = start + length
val result = tag match {
- case UTF8 =>
+ case UTF8 =>
goto(end)
Simple(termName(bytes, start.index, length))
case QUALIFIED =>
@@ -64,21 +64,21 @@ class TastyUnpickler(reader: TastyReader) {
assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
result
}
-
+
private def readHeader(): UUID = {
for (i <- 0 until header.length)
check(readByte() == header(i), "not a TASTy file")
val major = readNat()
val minor = readNat()
- check(major == MajorVersion && minor <= MinorVersion,
+ check(major == MajorVersion && minor <= MinorVersion,
s"""TASTy signature has wrong version.
| expected: $MajorVersion.$MinorVersion
| found : $major.$minor""".stripMargin)
new UUID(readUncompressedLong(), readUncompressedLong())
}
-
+
val uuid = readHeader()
-
+
locally {
until(readEnd()) { tastyName.add(readName()) }
while (!isAtEnd) {
@@ -88,8 +88,8 @@ class TastyUnpickler(reader: TastyReader) {
goto(secEnd)
}
}
-
- def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
+
+ def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
for (reader <- sectionReader.get(sec.name)) yield
sec.unpickle(reader, tastyName)
}
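
Sections are fetched by name, so a consumer pays only for the sections it actually decodes, and `unpickle` returns `None` when a section is missing (which is how position-less TASTY is handled). A standalone model of the lookup:

  object SectionLookupDemo {
    abstract class SectionUnpickler[R](val name: String) {
      def unpickle(payload: Array[Byte]): R
    }

    // Stand-in for the name -> reader map built while scanning the file.
    val sections: Map[String, Array[Byte]] =
      Map("ASTs" -> Array[Byte](1, 2, 3), "Positions" -> Array[Byte](9))

    def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
      sections.get(sec.name).map(sec.unpickle)

    def main(args: Array[String]): Unit = {
      val size = new SectionUnpickler[Int]("ASTs") {
        def unpickle(payload: Array[Byte]) = payload.length
      }
      println(unpickle(size))  // Some(3)
      println(unpickle(new SectionUnpickler[Int]("Comments") {
        def unpickle(payload: Array[Byte]) = payload.length
      }))                      // None: no such section in this file
    }
  }
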
diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala
index c1eae5014..c224fc30b 100644
--- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala
@@ -11,20 +11,20 @@ import ast.tpd.Tree
class TreeBuffer extends TastyBuffer(1000000) {
private final val ItemsOverOffsets = 2
-
+
private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
private var offsets = new Array[Int](initialOffsetSize)
private var isRelative = new Array[Boolean](initialOffsetSize)
private var delta: Array[Int] = _
private var numOffsets = 0
- private[pickling] val pickledTrees = new java.util.IdentityHashMap[Tree, Any] // Value type is really Addr, but that's not compatible with null
-
+ private[pickling] val pickledTrees = new java.util.IdentityHashMap[Tree, Any] // Value type is really Addr, but that's not compatible with null
+
def addrOfTree(tree: Tree): Option[Addr] = pickledTrees.get(tree) match {
case null => None
case n => Some(n.asInstanceOf[Addr])
}
-
+
private def offset(i: Int): Addr = Addr(offsets(i))
private def keepOffset(relative: Boolean): Unit = {
@@ -36,7 +36,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
isRelative(numOffsets) = relative
numOffsets += 1
}
-
+
/** Reserve space for a reference, to be adjusted later */
def reserveRef(relative: Boolean): Addr = {
val addr = currentAddr
@@ -50,19 +50,19 @@ class TreeBuffer extends TastyBuffer(1000000) {
keepOffset(relative = false)
fillAddr(reserveAddr(), target)
}
-
+
/** Fill previously reserved field with a reference */
def fillRef(at: Addr, target: Addr, relative: Boolean) = {
val addr = if (relative) target.relativeTo(at) else target
fillAddr(at, addr)
}
-
+
/** The amount by which the bytes at the given address are shifted under compression */
def deltaAt(at: Addr): Int = {
val idx = bestFit(offsets, numOffsets, at.index - 1)
if (idx < 0) 0 else delta(idx)
}
-
+
/** The address to which `x` is translated under compression */
def adjusted(x: Addr): Addr = x - deltaAt(x)
@@ -77,11 +77,11 @@ class TreeBuffer extends TastyBuffer(1000000) {
val skippedCount = skippedOff.index - off.index
assert(skippedCount < AddrWidth, s"unset field at position $off")
lastDelta += skippedCount
- delta(i) = lastDelta
+ delta(i) = lastDelta
i += 1
}
}
-
+
   /** The absolute or relative adjusted address at index `i` of `offsets` array */
private def adjustedOffset(i: Int): Addr = {
val at = offset(i)
@@ -90,12 +90,12 @@ class TreeBuffer extends TastyBuffer(1000000) {
val start = skipNat(at)
val len1 = original + delta(i) - deltaAt(original + start.index)
val len2 = adjusted(original + start.index) - adjusted(start).index
- assert(len1 == len2,
+ assert(len1 == len2,
s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2")
len1
} else adjusted(original)
}
-
+
/** Adjust all offsets according to previously computed deltas */
private def adjustOffsets(): Unit = {
for (i <- 0 until numOffsets) {
@@ -103,7 +103,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
fillAddr(offset(i), corrected)
}
}
-
+
   /** Adjust deltas to also take into account references that will shrink (and thereby
    * generate additional zeroes that can be skipped) due to previously
    * computed adjustments.
@@ -118,13 +118,13 @@ class TreeBuffer extends TastyBuffer(1000000) {
delta1(i) = lastDelta
i += 1
}
- val saved =
+ val saved =
if (numOffsets == 0) 0
else delta1(numOffsets - 1) - delta(numOffsets - 1)
delta = delta1
saved
}
-
+
/** Compress pickle buffer, shifting bytes to close all skipped zeroes. */
private def compress(): Int = {
var lastDelta = 0
@@ -147,7 +147,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
length -= lastDelta
wasted
}
-
+
def adjustPickledTrees(): Unit = {
val it = pickledTrees.keySet.iterator
while (it.hasNext) {
@@ -155,7 +155,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
pickledTrees.put(tree, adjusted(pickledTrees.get(tree).asInstanceOf[Addr]))
}
}
-
+
/** Final assembly, involving the following steps:
* - compute deltas
* - adjust deltas until additional savings are < 1% of total
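
Compression closes the zero gaps left by over-wide reserved address fields; afterwards every old address `x` maps to `x - deltaAt(x)`, where `deltaAt` looks up the cumulative shift recorded for the last patched offset before `x`. A standalone sketch, assuming `bestFit` (from util.Util, not shown in this diff) returns the largest index whose offset is at most the probed value:

  object AddressShiftDemo {
    // offsets(i): address of the i-th patched field;
    // delta(i): cumulative bytes saved up to and including that field.
    val offsets = Array(10, 20, 30)
    val delta   = Array(1, 3, 4)

    def deltaAt(addr: Int): Int = {
      val idx = offsets.lastIndexWhere(_ <= addr - 1)  // assumed bestFit behaviour
      if (idx < 0) 0 else delta(idx)
    }

    def adjusted(x: Int): Int = x - deltaAt(x)

    def main(args: Array[String]): Unit =
      Seq(5, 15, 25, 40).foreach(a => println(s"$a -> ${adjusted(a)}"))  // 5, 14, 22, 36
  }
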
diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala
index 85addc563..53dd34094 100644
--- a/src/dotty/tools/dotc/core/pickling/TreePickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala
@@ -33,7 +33,7 @@ class TreePickler(pickler: TastyPickler) {
}
def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match {
- case tree: MemberDef =>
+ case tree: MemberDef =>
if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr
case _ =>
}
@@ -54,13 +54,13 @@ class TreePickler(pickler: TastyPickler) {
val Signature(params, result) = sig
pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result)))
}
-
+
private def pickleName(sym: Symbol)(implicit ctx: Context): Unit =
- if (sym is Flags.ExpandedName)
+ if (sym is Flags.ExpandedName)
pickleName(TastyName.Expanded(
nameIndex(sym.name.expandedPrefix), nameIndex(sym.name.unexpandedName)))
else pickleName(sym.name)
-
+
private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
case Some(label) =>
if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
@@ -68,20 +68,20 @@ class TreePickler(pickler: TastyPickler) {
ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
pickleForwardSymRef(sym)
}
-
+
private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = {
val ref = reserveRef(relative = false)
assert(!sym.is(Flags.Package), sym)
- forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
+ forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
}
-
+
private def isLocallyDefined(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
case Some(label) => assert(sym.exists); label != NoAddr
case None => false
}
def pickle(trees: List[Tree])(implicit ctx: Context) = {
-
+
def qualifiedName(sym: Symbol): TastyName =
if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName)
else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name))
@@ -141,9 +141,9 @@ class TreePickler(pickler: TastyPickler) {
println(i"error when pickling type $tpe0")
throw ex
}
-
+
def pickleNewType(tpe: Type, richTypes: Boolean): Unit = try { tpe match {
- case ConstantType(value) =>
+ case ConstantType(value) =>
pickleConstant(value)
case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
pickleType(tpe.info.bounds.hi)
@@ -152,12 +152,12 @@ class TreePickler(pickler: TastyPickler) {
if (sym.is(Flags.Package)) {
writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
pickleName(qualifiedName(sym))
- }
+ }
else {
assert(tpe.prefix == NoPrefix)
def pickleRef() = {
writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
- pickleSymRef(sym)
+ pickleSymRef(sym)
}
if (sym is Flags.BindDefinedType) {
registerDef(sym)
@@ -175,14 +175,14 @@ class TreePickler(pickler: TastyPickler) {
pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix)
case tpe: NamedType =>
if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda)
- // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will
+ // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will
// be reconstituted when unpickling.
pickleType(tpe.prefix)
else if (isLocallyDefined(tpe.symbol)) {
writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol)
pickleSymRef(tpe.symbol); pickleType(tpe.prefix)
}
- else {
+ else {
writeByte(if (tpe.isType) TYPEREF else TERMREF)
pickleName(tpe.name); pickleType(tpe.prefix)
}
@@ -199,10 +199,10 @@ class TreePickler(pickler: TastyPickler) {
val args = tpe.argInfos(interpolate = false)
if (args.isEmpty) {
writeByte(REFINEDtype)
- withLength {
+ withLength {
pickleType(tpe.parent)
pickleName(tpe.refinedName)
- pickleType(tpe.refinedInfo, richTypes = true)
+ pickleType(tpe.refinedInfo, richTypes = true)
}
}
else {
@@ -211,8 +211,8 @@ class TreePickler(pickler: TastyPickler) {
}
case tpe: TypeAlias =>
writeByte(TYPEALIAS)
- withLength {
- pickleType(tpe.alias, richTypes)
+ withLength {
+ pickleType(tpe.alias, richTypes)
tpe.variance match {
case 1 => writeByte(COVARIANT)
case -1 => writeByte(CONTRAVARIANT)
@@ -237,7 +237,7 @@ class TreePickler(pickler: TastyPickler) {
case tpe: PolyType if richTypes =>
writeByte(POLYtype)
pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramBounds)
- case tpe: PolyParam =>
+ case tpe: PolyParam =>
if (!pickleParamType(tpe))
// TODO figure out why this case arises in e.g. pickling AbstractFileReader.
ctx.typerState.constraint.entry(tpe) match {
@@ -249,19 +249,19 @@ class TreePickler(pickler: TastyPickler) {
case tpe: LazyRef =>
pickleType(tpe.ref)
}} catch {
- case ex: AssertionError =>
+ case ex: AssertionError =>
println(i"error while pickling type $tpe")
throw ex
}
-
- def pickleMethodic(result: Type, names: List[Name], types: List[Type]) =
+
+ def pickleMethodic(result: Type, names: List[Name], types: List[Type]) =
withLength {
pickleType(result, richTypes = true)
(names, types).zipped.foreach { (name, tpe) =>
- pickleName(name); pickleType(tpe)
+ pickleName(name); pickleType(tpe)
}
}
-
+
def pickleParamType(tpe: ParamType): Boolean = {
val binder = pickledTypes.get(tpe.binder)
val pickled = binder != null
@@ -271,10 +271,10 @@ class TreePickler(pickler: TastyPickler) {
}
pickled
}
-
+
def pickleTpt(tpt: Tree): Unit = pickleType(tpt.tpe) // TODO correlate with original when generating positions
-
- def pickleTreeUnlessEmpty(tree: Tree): Unit =
+
+ def pickleTreeUnlessEmpty(tree: Tree): Unit =
if (!tree.isEmpty) pickleTree(tree)
def pickleTree(tree: Tree): Unit = try {
@@ -283,14 +283,14 @@ class TreePickler(pickler: TastyPickler) {
case Ident(name) =>
tree.tpe match {
case tp: TermRef => pickleType(tp)
- case _ =>
+ case _ =>
writeByte(IDENT)
pickleName(name)
pickleType(tree.tpe)
}
- case This(_) =>
+ case This(_) =>
pickleType(tree.tpe)
- case Select(qual, name) =>
+ case Select(qual, name) =>
writeByte(SELECT)
val realName = tree.tpe match {
case tp: NamedType if tp.name.isShadowedName => tp.name
@@ -321,7 +321,7 @@ class TreePickler(pickler: TastyPickler) {
}
case Super(qual, mix) =>
writeByte(SUPER)
- withLength {
+ withLength {
pickleTree(qual);
if (!mix.isEmpty) {
val SuperType(_, mixinType) = tree.tpe
@@ -350,12 +350,12 @@ class TreePickler(pickler: TastyPickler) {
case If(cond, thenp, elsep) =>
writeByte(IF)
withLength{ pickleTree(cond); pickleTree(thenp); pickleTree(elsep) }
- case Closure(env, meth, tpt) =>
+ case Closure(env, meth, tpt) =>
writeByte(LAMBDA)
assert(env.isEmpty)
- withLength{
+ withLength{
pickleTree(meth)
- if (tpt.tpe.exists) pickleTpt(tpt)
+ if (tpt.tpe.exists) pickleTpt(tpt)
}
case Match(selector, cases) =>
writeByte(MATCH)
@@ -383,14 +383,14 @@ class TreePickler(pickler: TastyPickler) {
withLength { alts.foreach(pickleTree) }
case UnApply(fun, implicits, patterns) =>
writeByte(UNAPPLY)
- withLength {
+ withLength {
pickleTree(fun)
for (implicitArg <- implicits) {
writeByte(IMPLICITarg)
pickleTree(implicitArg)
}
pickleType(tree.tpe)
- patterns.foreach(pickleTree)
+ patterns.foreach(pickleTree)
}
case tree: ValDef =>
pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs)
@@ -400,7 +400,7 @@ class TreePickler(pickler: TastyPickler) {
for (vparams <- tree.vparamss) {
writeByte(PARAMS)
withLength { pickleParams(vparams) }
- }
+ }
}
pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams)
case tree: TypeDef =>
@@ -410,7 +410,7 @@ class TreePickler(pickler: TastyPickler) {
writeByte(TEMPLATE)
val (params, rest) = tree.body partition {
case stat: TypeDef => stat.symbol is Flags.Param
- case stat: ValOrDefDef =>
+ case stat: ValOrDefDef =>
stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter
case _ => false
}
@@ -435,7 +435,7 @@ class TreePickler(pickler: TastyPickler) {
withLength {
pickleTree(expr)
selectors foreach {
- case Pair(Ident(from), Ident(to)) =>
+ case Pair(Ident(from), Ident(to)) =>
writeByte(RENAMED)
withLength { pickleName(from); pickleName(to) }
case Ident(name) =>
@@ -468,13 +468,13 @@ class TreePickler(pickler: TastyPickler) {
pickleModifiers(sym)
}
}
-
+
def pickleParam(tree: Tree): Unit = tree match {
case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt)
case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs)
- case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
+ case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
}
-
+
def pickleParams(trees: List[Tree]): Unit = {
trees.foreach(preRegister)
trees.foreach(pickleParam)
@@ -504,7 +504,7 @@ class TreePickler(pickler: TastyPickler) {
if (flags is Local) writeByte(LOCAL)
if (flags is Synthetic) writeByte(SYNTHETIC)
if (flags is Artifact) writeByte(ARTIFACT)
- if (flags is Scala2x) writeByte(SCALA2X)
+ if (flags is Scala2x) writeByte(SCALA2X)
if (flags is InSuperCall) writeByte(INSUPERCALL)
if (sym.isTerm) {
if (flags is Implicit) writeByte(IMPLICIT)
@@ -512,18 +512,18 @@ class TreePickler(pickler: TastyPickler) {
if (flags is AbsOverride) writeByte(ABSOVERRIDE)
if (flags is Mutable) writeByte(MUTABLE)
if (flags is Accessor) writeByte(FIELDaccessor)
- if (flags is CaseAccessor) writeByte(CASEaccessor)
+ if (flags is CaseAccessor) writeByte(CASEaccessor)
if (flags is DefaultParameterized) writeByte(DEFAULTparameterized)
} else {
if (flags is Sealed) writeByte(SEALED)
- if (flags is Abstract) writeByte(ABSTRACT)
+ if (flags is Abstract) writeByte(ABSTRACT)
if (flags is Trait) writeByte(TRAIT)
if (flags is Covariant) writeByte(COVARIANT)
if (flags is Contravariant) writeByte(CONTRAVARIANT)
}
sym.annotations.foreach(pickleAnnotation)
}
-
+
def pickleAnnotation(ann: Annotation) = {
writeByte(ANNOTATION)
withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
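
A reference to a symbol whose definition has not been pickled yet cannot know its target address, so `pickleForwardSymRef` reserves the field and remembers it per symbol; `registerDef` can then patch every reserved field once the definition's address is known. A standalone sketch of that bookkeeping (strings stand in for symbols, one Int cell per pickled word):

  import scala.collection.mutable

  object ForwardRefDemo {
    val buf         = mutable.ArrayBuffer[Int]()        // one cell per pickled word
    val symRefs     = mutable.Map[String, Int]()        // symbol -> definition address
    val forwardRefs = mutable.Map[String, List[Int]]()  // symbol -> unpatched cells

    def pickleSymRef(sym: String): Unit = symRefs.get(sym) match {
      case Some(addr) => buf += addr                    // backward ref: address known
      case None =>                                      // forward ref: reserve and remember
        forwardRefs(sym) = buf.length :: forwardRefs.getOrElse(sym, Nil)
        buf += -1
    }

    def registerDef(sym: String): Unit = {
      symRefs(sym) = buf.length
      for (cell <- forwardRefs.remove(sym).getOrElse(Nil)) buf(cell) = buf.length
    }

    def main(args: Array[String]): Unit = {
      pickleSymRef("f")  // f not defined yet: cell 0 reserved
      registerDef("f")   // definition of f starts at address 1; cell 0 patched
      buf += 42          // stand-in for f's pickled body
      pickleSymRef("f")  // resolves immediately
      println(buf)       // ArrayBuffer(1, 42, 1)
    }
  }
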
diff --git a/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala
index c177b05c0..07d3badf7 100644
--- a/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala
@@ -24,7 +24,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
import dotty.tools.dotc.core.pickling.PickleFormat._
import TastyName._
import tpd._
-
+
private var readPositions = false
private var totalRange = NoPosition
private var positions: collection.Map[Addr, Position] = _
@@ -40,12 +40,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
this.totalRange = totalRange
this.positions = positions
}
-
+
private val symAtAddr = new mutable.HashMap[Addr, Symbol]
private val treeAtAddr = new mutable.HashMap[Addr, Tree]
private val typeAtAddr = new mutable.HashMap[Addr, Type] // currently populated only for types that are known to be SHAREd.
- private var stubs: Set[Symbol] = Set()
-
+ private var stubs: Set[Symbol] = Set()
+
private var roots: Set[SymDenotation] = null
/** Enter all toplevel classes and objects into their scopes
@@ -64,13 +64,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
normalizePos(stats, totalRange)
stats
}
-
+
def toTermName(tname: TastyName): TermName = tname match {
case Simple(name) => name
case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
case Signed(original, params, result) => toTermName(original)
case Shadowed(original) => toTermName(original).shadowedName
- case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
+ case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
case SuperAccessor(accessed) => ???
case DefaultGetter(meth, num) => ???
@@ -88,17 +88,17 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
class TreeReader(val reader: TastyReader) {
import reader._
-
+
def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
def fork = forkAt(currentAddr)
-
+
def skipTree(tag: Int): Unit =
if (tag >= firstLengthTreeTag) goto(readEnd())
else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
else if (tag >= firstASTTreeTag) skipTree()
else if (tag >= firstNatTreeTag) readNat()
def skipTree(): Unit = skipTree(readByte())
-
+
def skipParams(): Unit =
while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
@@ -112,7 +112,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
else tag
}
-
+
def readName(): TermName = toTermName(readNameRef())
def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
@@ -124,19 +124,19 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case name =>
toTermName(name)
}
-
+
// ------ Reading types -----------------------------------------------------
-
+
/** Read names in an interleaved sequence of (parameter) names and types/bounds */
- def readParamNames[N <: Name](end: Addr): List[N] =
- until(end) {
+ def readParamNames[N <: Name](end: Addr): List[N] =
+ until(end) {
val name = readName().asInstanceOf[N]
- skipTree()
+ skipTree()
name
}
/** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */
- def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
+ def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
until(end) { readNat(); readType().asInstanceOf[T] }
   /** Read reference to definition and return symbol created at that definition */
@@ -165,17 +165,17 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
typeAtAddr(start) = tp
op
}
-
+
def readLengthType(): Type = {
val end = readEnd()
-
+
def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
val nameReader = fork
nameReader.skipTree() // skip result
val paramReader = nameReader.fork
(nameReader.readParamNames[N](end), paramReader)
}
-
+
val result =
(tag: @switch) match {
case SUPERtype =>
@@ -194,7 +194,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
TypeBounds(readType(), readType())
case TYPEALIAS =>
val alias = readType()
- val variance =
+ val variance =
if (nextByte == COVARIANT) { readByte(); 1 }
else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
else 0
@@ -235,9 +235,9 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
result
}
-
+
def readSimpleType(): Type = (tag: @switch) match {
- case TYPEREFdirect | TERMREFdirect =>
+ case TYPEREFdirect | TERMREFdirect =>
NamedType.withFixedSym(NoPrefix, readSymRef())
case TYPEREFsymbol | TERMREFsymbol =>
readSymNameRef()
@@ -287,10 +287,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case BYNAMEtype =>
ExprType(readType())
}
-
+
if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
}
-
+
private def readSymNameRef()(implicit ctx: Context): Type = {
val sym = readSymRef()
val prefix = readType()
@@ -300,17 +300,17 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
// without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
// the problem arises when a self type of a trait is a type parameter of the same trait.
case _ => res
- }
+ }
}
-
+
private def readPackageRef()(implicit ctx: Context): TermSymbol = {
val name = readName()
if (name == nme.ROOT) defn.RootPackage
else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
else ctx.requiredPackage(name)
}
-
- def readTypeRef(): Type =
+
+ def readTypeRef(): Type =
typeAtAddr(readAddr())
def readPath()(implicit ctx: Context): Type = {
@@ -318,23 +318,23 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
assert(tp.isInstanceOf[SingletonType])
tp
}
-
- def readTermRef()(implicit ctx: Context): TermRef =
+
+ def readTermRef()(implicit ctx: Context): TermRef =
readType().asInstanceOf[TermRef]
// ------ Reading definitions -----------------------------------------------------
-
- private def noRhs(end: Addr): Boolean =
+
+ private def noRhs(end: Addr): Boolean =
currentAddr == end || isModifierTag(nextByte)
-
+
private def localContext(owner: Symbol)(implicit ctx: Context) = {
val lctx = ctx.fresh.setOwner(owner)
if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
}
-
+
private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbstractType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
val lacksDefinition =
- rhsIsEmpty &&
+ rhsIsEmpty &&
name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
isAbstractType
var flags = givenFlags
@@ -350,7 +350,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
flags
}
- /** Create symbol of definition node and enter in symAtAddr map
+ /** Create symbol of definition node and enter in symAtAddr map
* @return true iff the definition does not contain initialization code
*/
def createSymbol()(implicit ctx: Context): Boolean = {
@@ -372,7 +372,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val expandedFlag = if (rawName.isInstanceOf[TastyName.Expanded]) ExpandedName else EmptyFlags
pickling.println(i"creating symbol $name at $start with flags $givenFlags")
val flags = normalizeFlags(tag, givenFlags | expandedFlag, name, isAbstractType, rhsIsEmpty)
- def adjustIfModule(completer: LazyType) =
+ def adjustIfModule(completer: LazyType) =
if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
val sym =
roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
@@ -386,7 +386,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case _ =>
val completer = adjustIfModule(new Completer(subReader(start, end)))
if (isClass)
- ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
privateWithin, coord = start.index)
else {
val sym = symAtAddr.get(start) match {
@@ -412,7 +412,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
tag != VALDEF || rhsIsEmpty
}
- /** Read modifier list into triplet of flags, annotations and a privateWithin
+ /** Read modifier list into triplet of flags, annotations and a privateWithin
   * boundary symbol.
*/
def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
@@ -452,7 +452,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case SCALA2X => addFlag(Scala2x)
case DEFAULTparameterized => addFlag(DefaultParameterized)
case INSUPERCALL => addFlag(InSuperCall)
- case PRIVATEqualified =>
+ case PRIVATEqualified =>
readByte()
privateWithin = readType().typeSymbol
case PROTECTEDqualified =>
@@ -470,23 +470,23 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
(flags, annots.toList, privateWithin)
}
-
+
   /** Create symbols for all definitions in statement sequence between
* current address and `end`.
* @return true iff none of the statements contains initialization code
*/
def indexStats(end: Addr)(implicit ctx: Context): Boolean = {
- val noInitss =
- until(end) {
+ val noInitss =
+ until(end) {
nextByte match {
- case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
- createSymbol()
- case IMPORT =>
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createSymbol()
+ case IMPORT =>
skipTree()
true
- case PACKAGE =>
+ case PACKAGE =>
processPackage { (pid, end) => implicit ctx => indexStats(end) }
- case _ =>
+ case _ =>
skipTree()
false
}
@@ -505,13 +505,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val pid = ref(readTermRef()).asInstanceOf[RefTree]
op(pid, end)(localContext(pid.symbol.moduleClass))
}
-
+
   /** Create symbols for the longest consecutive sequence of parameters with given
    * `tag` starting at current address.
*/
def indexParams(tag: Int)(implicit ctx: Context) =
while (nextByte == tag) createSymbol()
-
+
/** Create symbols for all type and value parameters of template starting
* at current address.
*/
@@ -523,13 +523,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
/** If definition was already read by a completer, return the previously read tree
- * or else read definition.
+ * or else read definition.
*/
def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
case Some(tree) => skipTree(); tree
case none => readNewDef()
}
-
+
private def readNewDef()(implicit ctx: Context): Tree = {
val start = currentAddr
val sym = symAtAddr(start)
@@ -540,7 +540,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
fork.indexParams(tag)
readIndexedParams(tag)
}
-
+
def readParamss(implicit ctx: Context): List[List[ValDef]] = {
collectWhile(nextByte == PARAMS) {
readByte()
@@ -548,19 +548,19 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
readParams[ValDef](PARAM)
}
}
-
- def readRhs(implicit ctx: Context) =
+
+ def readRhs(implicit ctx: Context) =
if (noRhs(end)) EmptyTree
else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
def localCtx = localContext(sym)
-
- def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
+
+ def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
ta.assignType(
untpd.DefDef(
sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
sym)
-
+
def ta = ctx.typeAssigner
val name = readName()
@@ -599,18 +599,18 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
DefDef(Nil, Nil, TypeTree(info))
}
}
- val mods =
+ val mods =
if (sym.annotations.isEmpty) EmptyModifiers
else Modifiers(annotations = sym.annotations.map(_.tree))
tree.withMods(mods) // record annotations in tree so that tree positions can be filled in.
goto(end)
setPos(start, tree)
}
-
+
private def readTemplate(implicit ctx: Context): Template = {
val start = currentAddr
val cls = ctx.owner.asClass
- def setClsInfo(parents: List[TypeRef], selfType: Type) =
+ def setClsInfo(parents: List[TypeRef], selfType: Type) =
cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
setClsInfo(Nil, NoType)
val localDummy = ctx.newLocalDummy(cls)
@@ -625,7 +625,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
}
val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
- val self =
+ val self =
if (nextByte == SELFDEF) {
readByte()
untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
@@ -636,16 +636,16 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
if (noInits) cls.setFlag(NoInits)
val constr = readIndexedDef().asInstanceOf[DefDef]
- def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
+ def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
(tparams, stats) match {
- case (tparam :: tparams1, (alias: TypeDef) :: stats1)
+ case (tparam :: tparams1, (alias: TypeDef) :: stats1)
if tparam.name == alias.name.expandedName(cls) =>
val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
(tparam :: alias :: tas, stats2)
case _ =>
(tparams, stats)
- }
-
+ }
+
val lazyStats = readLater(end, rdr => implicit ctx => {
val stats0 = rdr.readIndexedStats(localDummy, end)
val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
@@ -655,29 +655,29 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
untpd.Template(constr, parents, self, lazyStats)
.withType(localDummy.nonMemberTermRef))
}
-
+
def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
- case TYPEDEF | VALDEF | DEFDEF | IMPORT =>
+ case TYPEDEF | VALDEF | DEFDEF | IMPORT =>
readIndexedDef()
- case IMPORT =>
+ case IMPORT =>
???
case PACKAGE =>
val start = currentAddr
processPackage { (pid, end) => implicit ctx =>
setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
}
- case _ =>
+ case _ =>
readTerm()(ctx.withOwner(exprOwner))
}
-
+
def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
until(end)(readIndexedStat(exprOwner))
-
+
def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
fork.indexStats(end)
readIndexedStats(exprOwner, end)
- }
-
+ }
+
def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
@@ -702,7 +702,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
untpd.Ident(readName()).withType(readType())
case SELECT =>
def readQual(name: Name) = {
- val localCtx =
+ val localCtx =
if (name == nme.CONSTRUCTOR) ctx.fresh.addMode(Mode.InSuperCall) else ctx
readTerm()(localCtx)
}
@@ -716,7 +716,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case name: Name => readRest(name, Signature.NotAMethod)
case (name: Name, sig: Signature) => readRest(name, sig)
}
-
+
case NEW =>
New(readTpt())
case _ =>
@@ -728,12 +728,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val result =
(tag: @switch) match {
- case SUPER =>
+ case SUPER =>
val qual = readTerm()
val mixClass = ifBefore(end)(readType().typeSymbol, NoSymbol)
val mixName = if (mixClass.exists) mixClass.name.asTypeName else tpnme.EMPTY
tpd.Super(qual, mixName, ctx.mode.is(Mode.InSuperCall), mixClass)
- case APPLY =>
+ case APPLY =>
val fn = readTerm()
val isJava = fn.tpe.isInstanceOf[JavaMethodType]
def readArg() = readTerm() match {
@@ -784,7 +784,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
Alternative(until(end)(readTerm()))
case UNAPPLY =>
val fn = readTerm()
- val implicitArgs =
+ val implicitArgs =
collectWhile(nextByte == IMPLICITarg) {
readByte()
readTerm()
@@ -798,19 +798,19 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
result
}
-
+
val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
tree.overwriteType(tree.tpe.simplified)
setPos(start, tree)
}
-
+
def readTpt()(implicit ctx: Context) = {
val start = currentAddr
val tp = readType()
if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree
}
- def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
+ def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) }
def readCase()(implicit ctx: Context): CaseDef = {
@@ -822,18 +822,18 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val guard = ifBefore(end)(readTerm(), EmptyTree)
setPos(start, CaseDef(pat, guard, rhs))
}
-
+
def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
val localReader = fork
goto(end)
new LazyReader(localReader, op)
}
-
+
// ------ Hooks for positions ------------------------------------------------
-
- /** Record address from which tree was created as a temporary position in the tree.
+
+ /** Record address from which tree was created as a temporary position in the tree.
* The temporary position contains deltas relative to the position of the (as yet unknown)
- * parent node. It is marked as a non-synthetic source position.
+ * parent node. It is marked as a non-synthetic source position.
*/
def setPos[T <: Tree](addr: Addr, tree: T): T = {
if (readPositions)
@@ -841,13 +841,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
tree
}
}
-
+
private def setNormalized(tree: Tree, parentPos: Position): Unit = {
assert(tree.pos.exists)
val absPos = Position(parentPos.start + offsetToInt(tree.pos.start), parentPos.end - tree.pos.end)
tree.setPosUnchecked(absPos)
}
-
+
def normalizePos(x: Any, parentPos: Position)(implicit ctx: Context): Unit =
traverse(x, parentPos, setNormalized)
@@ -857,10 +857,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val res = op(reader)(ctx.addMode(Mode.AllowDependentFunctions))
normalizePos(res, parentPos)
res
- }
+ }
}
-
- class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
+
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
extends LazyAnnotation(sym) with DeferredPosition {
def complete(implicit ctx: Context) = {
val res = reader.readTerm()
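
Method bodies, template statements and annotations are not decoded eagerly: `readLater` forks a reader positioned at the slice, moves the main reader past it with `goto(end)`, and wraps the fork so that decoding runs only when the tree is forced. The control flow in isolation:

  object LazyReadDemo {
    class Reader(val data: Vector[Int], var pos: Int) {
      def read(): Int = { val r = data(pos); pos += 1; r }
      def fork: Reader = new Reader(data, pos)  // same data, independent position
    }

    // Fork at the slice, skip the main reader past it, decode on demand.
    def readLater[T](r: Reader, end: Int, op: Reader => T): () => T = {
      val local = r.fork
      r.pos = end                               // like goto(end)
      () => op(local)
    }

    def main(args: Array[String]): Unit = {
      val r = new Reader(Vector(1, 2, 3, 4), 0)
      println(r.read())                                              // 1
      val rest = readLater(r, 4, rd => List(rd.read(), rd.read(), rd.read()))
      println(r.pos)                                                 // 4: payload skipped
      println(rest())                                                // List(2, 3, 4)
    }
  }
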
diff --git a/src/dotty/tools/dotc/core/pickling/UnPickler.scala b/src/dotty/tools/dotc/core/pickling/UnPickler.scala
index a47b8bda2..7d220783b 100644
--- a/src/dotty/tools/dotc/core/pickling/UnPickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/UnPickler.scala
@@ -368,7 +368,7 @@ class UnPickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot:
def fromName(name: Name): Symbol = name.toTermName match {
case nme.ROOT => loadingMirror.RootClass
case nme.ROOTPKG => loadingMirror.RootPackage
- case _ =>
+ case _ =>
def declIn(owner: Symbol) = adjust(owner.info.decl(name))
val sym = declIn(owner)
if (sym.exists || owner.ne(defn.ObjectClass)) sym else declIn(defn.AnyClass)
@@ -687,7 +687,7 @@ class UnPickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot:
if (decls.isEmpty) parent
else {
def addRefinement(tp: Type, sym: Symbol) = {
- def subst(info: Type, rt: RefinedType) =
+ def subst(info: Type, rt: RefinedType) =
if (clazz.isClass) info.substThis(clazz.asClass, SkolemType(rt))
else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
RefinedType(tp, sym.name, subst(sym.info, _))