From 69109c0ace5e3ac831c3b0a5635f25317d3b28bf Mon Sep 17 00:00:00 2001 From: James Iry Date: Thu, 7 Mar 2013 15:05:35 -0800 Subject: Analyze constants to remove unnecessary branches This commit adds analysis and optimization of constants to remove unnecessary branches. It uses abstract interpretation to determine what constant(s) a particular stack slot or variable might or might not hold at a given spot and uses that knowledge to eliminate branches that cannot be taken. Its primary goal is null check removal, but it also works for other constants. Several tests are modified to include the new optimization phase. Two new tests are added. One verifies that branching still works as expected. The other verifies that branches are removed. --- src/compiler/scala/tools/nsc/Global.scala | 10 +- .../nsc/backend/opt/ConstantOptimization.scala | 639 +++++++++++++++++++++ .../scala/tools/nsc/settings/ScalaSettings.scala | 3 +- 3 files changed, 650 insertions(+), 2 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 51fa8f0ab9..2156a39da6 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -25,7 +25,7 @@ import transform._ import backend.icode.{ ICodes, GenICode, ICodeCheckers } import backend.{ ScalaPrimitives, Platform, JavaPlatform } import backend.jvm.GenASM -import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination } +import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination } import backend.icode.analysis._ import scala.language.postfixOps @@ -592,6 +592,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val runsRightAfter = None } with ClosureElimination + // phaseName = "constopt" + object constantOptimization extends { + val global: Global.this.type = Global.this + val runsAfter = List("closelim") + val runsRightAfter = None + } with ConstantOptimization + // phaseName = "dce" object deadCode extends { val global: Global.this.type = Global.this @@ -676,6 +683,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) inliner -> "optimization: do inlining", inlineExceptionHandlers -> "optimization: inline exception handlers", closureElimination -> "optimization: eliminate uncalled closures", + constantOptimization -> "optimization: optimize null and other constants", deadCode -> "optimization: eliminate dead code", terminal -> "The last phase in the compiler chain" ) diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala new file mode 100644 index 0000000000..b3da012e1a --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -0,0 +1,639 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author James Iry + */ + +package scala.tools.nsc +package backend.opt + +import scala.tools.nsc.backend.icode.analysis.LubException +import scala.annotation.tailrec + +/** + * ConstantOptimization uses abstract interpretation to approximate for + * each instruction what constants a variable or stack slot might hold + * or cannot hold. From this it will eliminate unreachable conditionals + * where only one branch is reachable, e.g. to eliminate unnecessary + * null checks. 
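+ *
+ * A small source-level sketch of the primary use case (`SomeModule` is just a
+ * placeholder object):
+ * {{{
+ *   if (SomeModule eq null) sys.error("unreachable") else SomeModule
+ * }}}
+ * LOAD_MODULE pushes a NOT_NULL value, so the null branch of the CZJUMP emitted
+ * for the test is provably unreachable; the conditional is rewritten into a DROP
+ * plus an unconditional JUMP to the surviving block.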
+ * + * With some more work it could be extended to + * - cache stable values (final fields, modules) in locals + * - replace the copy propagation in ClosureElilmination + * - fold constants + * - eliminate unnecessary stores and loads + * - propagate knowledge gathered from conditionals for further optimization + */ +abstract class ConstantOptimization extends SubComponent { + import global._ + import icodes._ + import icodes.opcodes._ + + val phaseName = "constopt" + + /** Create a new phase */ + override def newPhase(p: Phase) = new ConstantOptimizationPhase(p) + + /** + * The constant optimization phase. + */ + class ConstantOptimizationPhase(prev: Phase) extends ICodePhase(prev) { + + def name = phaseName + + override def apply(c: IClass) { + if (settings.YconstOptimization.value) { + val analyzer = new ConstantOptimizer + analyzer optimizeClass c + } + } + } + + class ConstantOptimizer { + def optimizeClass(cls: IClass) { + log(s"Analyzing ${cls.methods.size} methods in $cls.") + cls.methods foreach { m => + optimizeMethod(m) + } + } + + def optimizeMethod(m: IMethod) { + if (m.hasCode) { + log(s"Analyzing ${m.symbol}") + val replacementInstructions = interpretMethod(m) + for (block <- m.blocks) { + if (replacementInstructions contains block) { + val instructions = replacementInstructions(block) + block.replaceInstruction(block.lastInstruction, instructions) + } + } + } + } + + /** + * A single possible (or impossible) datum that can be held in Contents + */ + private sealed abstract class Datum + /** + * A constant datum + */ + private case class Const(c: Constant) extends Datum { + def isIntAssignable = c.tag >= BooleanTag && c.tag <= IntTag + def toInt = c.tag match { + case BooleanTag => if (c.booleanValue) 1 else 0 + case _ => c.intValue + } + + /** + * True if this constant has the same representation (and therefore would compare true under eq) as another constant + */ + override def equals(other: Any) = (other match { + case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value) + case _ => false + }) + + /** + * Hash code based on representation of the constant, consistent with equals + */ + override def hashCode = if (c.isIntRange) c.intValue else c.hashCode + + } + /** + * A datum that has been Boxed via a BOX instruction + */ + private case class Boxed(c: Datum) extends Datum + + /** + * The knowledge we have about the abstract state of one location in terms + * of what constants it might or cannot hold. Forms a lower + * lattice where lower elements in the lattice indicate less knowledge. 
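+ *
+ * For example, interpreting CONSTANT(k) pushes Possible(Set(Const(k))), meaning
+ * the slot holds exactly that constant, whereas LOAD_MODULE records only the
+ * negative fact NOT_NULL, i.e. Impossible(Set(NULL)).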
+ * + * With the following partial ordering (where '>' indicates more precise knowledge) + * + * Possible(xs) > Possible(xs + y) + * Possible(xs) > Impossible(ys) + * Impossible(xs + y) > Impossible(xs) + * + * and the following merges, which indicate merging knowledge from two paths through + * the code, + * + * // left must be 1 or 2, right must be 2 or 3 then we must have a 1, 2 or 3 + * Possible(xs) merge Possible(ys) => Possible(xs union ys) + * + * // Left says can't be 2 or 3, right says can't be 3 or 4 + * // then it's not 3 (it could be 2 from the right or 4 from the left) + * Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys) + * + * // Left says it can't be 2 or 3, right says it must be 3 or 4, then + * // it can't be 2 (left rules out 4 and right says 3 is possible) + * Impossible(xs) merge Possible(ys) => Impossible(xs -- ys) + * + * Intuitively, Possible(empty) says that a location can't hold anything, + * it's uninitialized. However, Possible(empty) never appears in the code. + * + * Conversely, Impossible(empty) says nothing is impossible, it could be + * anything. Impossible(empty) is given a synonym UNKNOWN and is used + * for, e.g., the result of an arbitrary method call. + */ + private sealed abstract class Contents { + /** + * Join this Contents with another coming from another path. Join enforces + * the lattice structure. It is symmetrical and never moves upward in the + * lattice + */ + final def merge(other: Contents): Contents = if (this eq other) this else (this, other) match { + case (Possible(possible1), Possible(possible2)) => + Possible(possible1 union possible2) + case (Impossible(impossible1), Impossible(impossible2)) => + Impossible(impossible1 intersect impossible2) + case (Impossible(impossible), Possible(possible)) => + Impossible(impossible -- possible) + case (Possible(possible), Impossible(impossible)) => + Impossible(impossible -- possible) + } + // TODO we could have more fine-grained knowledge, e.g. know that 0 < x < 3. But for now equality/inequality is a good start. + def mightEqual(other: Contents): Boolean + def mightNotEqual(other: Contents): Boolean + } + private def SingleImpossible(x: Datum) = new Impossible(Set(x)) + + /** + * The location is known to have one of a set of values. + */ + private case class Possible(possible: Set[Datum]) extends Contents { + assert(possible.nonEmpty, "Contradiction: had an empty possible set indicating an uninitialized location") + def mightEqual(other: Contents): Boolean = (this eq other) || (other match { + // two Possibles might be equal if they have any possible members in common + case Possible(possible2) => (possible intersect possible2).nonEmpty + // a possible can be equal to an impossible if the impossible doesn't rule + // out all the possibilities + case Impossible(possible2) => (possible -- possible2).nonEmpty + }) + def mightNotEqual(other: Contents): Boolean = (this ne other) && (other match { + // two Possibles might not be equal if either has possible members that the other doesn't + case Possible(possible2) => (possible -- possible2).nonEmpty || (possible2 -- possible).nonEmpty + case Impossible(_) => true + }) + } + private def SinglePossible(x: Datum) = new Possible(Set(x)) + + /** + * The location is known to not have any of a set of values value (e.g null). 
+ */ + private case class Impossible(impossible: Set[Datum]) extends Contents { + def mightEqual(other: Contents): Boolean = (this eq other) || (other match { + case Possible(_) => other mightEqual this + case _ => true + }) + def mightNotEqual(other: Contents): Boolean = (this eq other) || (other match { + case Possible(_) => other mightNotEqual this + case _ => true + }) + } + + /** + * Our entire knowledge about the contents of all variables and the stack. It forms + * a lattice primarily driven by the lattice structure of Contents. + * + * In addition to the rules of contents, State has the following properties: + * - The merge of two sets of locals holds the merges of locals found in the intersection + * of the two sets of locals. Locals not found in a + * locals map are thus possibly uninitialized and attempting to load them results + * in an error. + * - The stack heights of two states must match otherwise it's an error to merge them + * + * State is immutable in order to aid in structure sharing of local maps and stacks + */ + private case class State(locals: Map[Local, Contents], stack: List[Contents]) { + def mergeLocals(olocals: Map[Local, Contents]): Map[Local, Contents] = if (locals eq olocals) locals else Map((for { + key <- (locals.keySet intersect olocals.keySet).toSeq + } yield (key, locals(key) merge olocals(key))): _*) + + def merge(other: State): State = if (this eq other) this else { + @tailrec def mergeStacks(l: List[Contents], r: List[Contents], out: List[Contents]): List[Contents] = (l, r) match { + case (Nil, Nil) => out.reverse + case (l, r) if l eq r => out.reverse ++ l + case (lhead :: ltail, rhead :: rtail) => mergeStacks(ltail, rtail, (lhead merge rhead) :: out) + case _ => sys.error("Mismatched stack heights") + } + + val newLocals = mergeLocals(other.locals) + + val newStack = if (stack eq other.stack) stack else mergeStacks(stack, other.stack, Nil) + State(newLocals, newStack) + } + + /** + * Peek at the top of the stack without modifying it. Error if the stack is empty + */ + def peek(n: Int): Contents = stack(n) + /** + * Push contents onto a stack + */ + def push(contents: Contents): State = this copy (stack = contents :: stack) + /** + * Drop n elements from the stack + */ + def drop(number: Int): State = this copy (stack = stack drop number) + /** + * Store the top of the stack into the specified local. An error if the stack + * is empty + */ + def store(variable: Local): State = { + val contents = stack.head + val newVariables = locals + ((variable, contents)) + new State(newVariables, stack.tail) + } + /** + * Load the specified local onto the top of the stack. An error the the local is uninitialized. 
+ */ + def load(variable: Local): State = { + val contents: Contents = locals.getOrElse(variable, sys.error(s"$variable is not initialized")) + push(contents) + } + /** + * A copy of this State with an empty stack + */ + def cleanStack: State = if (stack.isEmpty) this else this copy (stack = Nil) + } + + // some precomputed constants + private val NULL = Const(Constant(null: Any)) + private val UNKNOWN = Impossible(Set.empty) + private val NOT_NULL = SingleImpossible(NULL) + private val CONST_UNIT = SinglePossible(Const(Constant(()))) + private val CONST_FALSE = SinglePossible(Const(Constant(false))) + private val CONST_ZERO_BYTE = SinglePossible(Const(Constant(0: Byte))) + private val CONST_ZERO_SHORT = SinglePossible(Const(Constant(0: Short))) + private val CONST_ZERO_CHAR = SinglePossible(Const(Constant(0: Char))) + private val CONST_ZERO_INT = SinglePossible(Const(Constant(0: Int))) + private val CONST_ZERO_LONG = SinglePossible(Const(Constant(0: Long))) + private val CONST_ZERO_FLOAT = SinglePossible(Const(Constant(0.0f))) + private val CONST_ZERO_DOUBLE = SinglePossible(Const(Constant(0.0d))) + private val CONST_NULL = SinglePossible(NULL) + + /** + * Given a TypeKind, figure out what '0' for it means in order to interpret CZJUMP + */ + private def getZeroOf(k: TypeKind): Contents = k match { + case UNIT => CONST_UNIT + case BOOL => CONST_FALSE + case BYTE => CONST_ZERO_BYTE + case SHORT => CONST_ZERO_SHORT + case CHAR => CONST_ZERO_CHAR + case INT => CONST_ZERO_INT + case LONG => CONST_ZERO_LONG + case FLOAT => CONST_ZERO_FLOAT + case DOUBLE => CONST_ZERO_DOUBLE + case REFERENCE(_) => CONST_NULL + case ARRAY(_) => CONST_NULL + case BOXED(_) => CONST_NULL + case ConcatClass => abort("no zero of ConcatClass") + } + + // normal locals can't be null, so we use null to mean the magic 'this' local + private val THIS_LOCAL: Local = null + + /** + * interpret a single instruction to find its impact on the abstract state + */ + private def interpretInst(in: State, inst: Instruction): State = inst match { + case THIS(_) => + in load THIS_LOCAL + + case CONSTANT(k) => + in push SinglePossible(Const(k)) + + case LOAD_ARRAY_ITEM(_) => + in drop 2 push UNKNOWN + + case LOAD_LOCAL(local) => + // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant + in load local + + case LOAD_FIELD(_, isStatic) => + val drops = if (isStatic) 0 else 1 + in drop drops push UNKNOWN + + case LOAD_MODULE(_) => + in push NOT_NULL + + case STORE_ARRAY_ITEM(_) => + in drop 3 + + case STORE_LOCAL(local) => + in store local + + case STORE_THIS(_) => + // if a local is already known to have a constant and we're replacing with the same constant then we can + // replace this with a drop + in store THIS_LOCAL + + case STORE_FIELD(_, isStatic) => + val drops = if (isStatic) 1 else 2 + in drop drops + + case CALL_PRIMITIVE(_) => + in drop inst.consumed push UNKNOWN + + case CALL_METHOD(_, _) => + // TODO we could special case implementations of equals that are known, e.g. 
String#equals + // We could turn Possible(string constants).equals(Possible(string constants) into an eq check + // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString) + // and eliminate the null check that likely precedes this call + val initial = in drop inst.consumed + (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN } + + case BOX(_) => + val value = in peek 0 + // we simulate boxing by, um, boxing the possible/impossible contents + // so if we have Possible(1,2) originally then we'll end up with + // a Possible(Boxed(1), Boxed(2)) + // Similarly, if we know the input is not a 0 then we'll know the + // output is not a Boxed(0) + val newValue = value match { + case Possible(values) => Possible(values map Boxed) + case Impossible(values) => Impossible(values map Boxed) + } + in drop 1 push newValue + + case UNBOX(_) => + val value = in peek 0 + val newValue = value match { + // if we have a Possible, then all the possibilities + // should themselves be Boxes. In that + // case we can merge them to figure out what the UNBOX will produce + case Possible(inners) => + assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location") + val sanitized: Set[Contents] = (inners map { + case Boxed(content) => SinglePossible(content) + case _ => UNKNOWN + }) + sanitized reduce (_ merge _) + // if we have an impossible then the thing that's impossible + // should be a box. We'll unbox that to see what we get + case unknown@Impossible(inners) => + if (inners.isEmpty) { + unknown + } else { + val sanitized: Set[Contents] = (inners map { + case Boxed(content) => SingleImpossible(content) + case _ => UNKNOWN + }) + sanitized reduce (_ merge _) + } + } + in drop 1 push newValue + + case NEW(_) => + in push NOT_NULL + + case CREATE_ARRAY(_, dims) => + in drop dims push NOT_NULL + + case IS_INSTANCE(_) => + // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP + // and if IS_INSTANCE/C(Z)JUMP the branch for "true" can + // know that whatever was checked was not a null + // see the TODO on CJUMP for more information about propagating null + // information + // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and + // replace with a constant false, but how often is a knowable null checked for instanceof? + // TODO we could track type information and statically know to eliminate IS_INSTANCE + // but that's probably not a huge win + in drop 1 push UNKNOWN // it's actually a Possible(true, false) but since the following instruction + // will be a conditional jump comparing to true or false there + // nothing to be gained by being more precise + + case CHECK_CAST(_) => + // TODO we could track type information and statically know to eliminate CHECK_CAST + // but that's probably not a huge win + in + + case DROP(_) => + in drop 1 + + case DUP(_) => + val value = in peek 0 + in push value + + case MONITOR_ENTER() => + in drop 1 + + case MONITOR_EXIT() => + in drop 1 + + case SCOPE_ENTER(_) | SCOPE_EXIT(_) => + in + + case LOAD_EXCEPTION(_) => + in push NOT_NULL + + case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) => + dumpClassesAndAbort("Unexpected block ending instruction: " + inst) + } + + /** + * interpret the last instruction of a block which will be jump, a conditional branch, a throw, or a return. + * It will result in a map from target blocks to the input state computed for that block. 
It + * also computes a replacement list of instructions + */ + private def interpretLast(in: State, inst: Instruction): (Map[BasicBlock, State], List[Instruction]) = { + def canSwitch(in1: Contents, tagSet: List[Int]) = { + in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) }) + } + + /** + * common code for interpreting CJUMP and CZJUMP + */ + def interpretConditional(kind: TypeKind, in: State, toDrop: Int, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = { + // TODO use reaching analysis to update the state in the two branches + // e.g. if the comparison was checking null equality on local x + // then the in the success branch we know x is null and + // on the failure branch we know it is not + // in fact, with copy propagation we could propagate that knowledge + // back through a chain of locations + // + // TODO if we do all that we need to be careful in the + // case that success and failure are the same target block + // because we're using a Map and don't want one possible state to clobber the other + // alternative mayb we should just replace the conditional with a jump if both targets are the same + + def mightEqual = val1 mightEqual val2 + def mightNotEqual = val1 mightNotEqual val2 + def guaranteedEqual = mightEqual && !mightNotEqual + + def succPossible = cond match { + case EQ => mightEqual + case NE => mightNotEqual + case LT | GT => !guaranteedEqual // if the two are guaranteed to be equal then they can't be LT/GT + case LE | GE => true + } + + def failPossible = cond match { + case EQ => mightNotEqual + case NE => mightEqual + case LT | GT => true + case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE + } + + val out = in drop toDrop + + var result = Map[BasicBlock, State]() + if (succPossible) { + result += ((success, out)) + } + + if (failPossible) { + result += ((failure, out)) + } + + if (result.size == 1) (result, List.fill(toDrop)(DROP(kind)) :+ JUMP(result.keySet.head)) + else (result, inst :: Nil) + } + + inst match { + case JUMP(whereto) => + (Map((whereto, in)), inst :: Nil) + + case CJUMP(success, failure, cond, kind) => + val in1 = in peek 0 + val in2 = in peek 1 + interpretConditional(kind, in, 2, in1, in2, success, failure, cond) + + case CZJUMP(success, failure, cond, kind) => + val in1 = in peek 0 + val in2 = getZeroOf(kind) + interpretConditional(kind, in, 1, in1, in2, success, failure, cond) + + case SWITCH(tags, labels) => + val in1 = in peek 0 + val newStuff = tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) } + val (reachableTags, reachableNormalLabels) = (tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) }).unzip + val reachableLabels = if (labels.size > tags.size) { + // if we've got an extra label then it's the default + val defaultLabel = labels.last + // see if the default is reachable by seeing if the input might be out of the set + // of all tags + val allTags = Possible(tags.flatten.toSet map { tag: Int => Const(Constant(tag)) }) + if (in1 mightNotEqual allTags) { + reachableNormalLabels :+ defaultLabel + } else { + reachableNormalLabels + } + } else { + reachableNormalLabels + } + // TODO similar to the comment in interpretConditional, we should update our the State going into each + // branch based on which tag is being matched. 
Also, just like interpretConditional, if target blocks + // are the same we need to merge State rather than clobber + + // alternative, maybe we should simplify the SWITCH to not have same target labels + val newState = in drop 1 + val result = Map(reachableLabels map { label => (label, newState) }: _*) + if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil) + else (result, inst :: Nil) + + // these instructions don't have target blocks + // (exceptions are assumed to be reachable from all instructions) + case RETURN(_) | THROW(_) => + (Map.empty, inst :: Nil) + + case _ => + dumpClassesAndAbort("Unexpected non-block ending instruction: " + inst) + } + } + + /** + * Analyze a single block to find how it transforms an input state into a states for its successor blocks + * Also computes a list of instructions to be used to replace its last instruction + */ + private def interpretBlock(in: State, block: BasicBlock): (Map[BasicBlock, State], Map[BasicBlock, State], List[Instruction]) = { + debuglog(s"interpreting block $block") + // number of instructions excluding the last one + val normalCount = block.size - 1 + + var exceptionState = in.cleanStack + var normalExitState = in + var idx = 0 + while (idx < normalCount) { + val inst = block(idx) + normalExitState = interpretInst(normalExitState, inst) + if (normalExitState.locals ne exceptionState.locals) + exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals) + idx += 1 + } + + val pairs = block.exceptionSuccessors map { b => (b, exceptionState) } + val exceptionMap = Map(pairs: _*) + + val (normalExitMap, newInstructions) = interpretLast(normalExitState, block.lastInstruction) + + (normalExitMap, exceptionMap, newInstructions) + } + + /** + * Analyze a single method to find replacement instructions + */ + private def interpretMethod(m: IMethod): Map[BasicBlock, List[Instruction]] = { + import scala.collection.mutable.{ Set => MSet, Map => MMap } + + debuglog(s"interpreting method $m") + var iterations = 0 + + // initially we know that 'this' is not null and the params are initialized to some unknown value + val initThis: Iterator[(Local, Contents)] = if (m.isStatic) Iterator.empty else Iterator.single((THIS_LOCAL, NOT_NULL)) + val initOtherLocals: Iterator[(Local, Contents)] = m.params.iterator map { param => (param, UNKNOWN) } + val initialLocals: Map[Local, Contents] = Map((initThis ++ initOtherLocals).toSeq: _*) + val initialState = State(initialLocals, Nil) + + // worklist of basic blocks to process, initially the start block + val worklist = MSet(m.startBlock) + // worklist of exception basic blocks. They're kept in a separate set so they can be + // processed after normal flow basic blocks. 
That's because exception basic blocks + // are more likely to have multiple predecessors and queueing them for later + // increases the chances that they'll only need to be interpreted once + val exceptionlist = MSet[BasicBlock]() + // our current best guess at what the input state is for each block + // initially we only know about the start block + val inputState = MMap[BasicBlock, State]((m.startBlock, initialState)) + + // update the inputState map based on new information from interpreting a block + // When the input state of a block changes, add it back to the work list to be + // reinterpreted + def updateInputStates(outputStates: Map[BasicBlock, State], worklist: MSet[BasicBlock]) { + for ((block, newState) <- outputStates) { + val oldState = inputState get block + val updatedState = oldState map (x => x merge newState) getOrElse newState + if (oldState != Some(updatedState)) { + worklist add block + inputState(block) = updatedState + } + } + } + + // the instructions to be used as the last instructions on each block + val replacements = MMap[BasicBlock, List[Instruction]]() + + while (worklist.nonEmpty || exceptionlist.nonEmpty) { + if (worklist.isEmpty) { + // once the worklist is empty, start processing exception blocks + val block = exceptionlist.head + exceptionlist remove block + worklist add block + } else { + iterations += 1 + val block = worklist.head + worklist remove block + val (normalExitMap, exceptionMap, newInstructions) = interpretBlock(inputState(block), block) + + updateInputStates(normalExitMap, worklist) + updateInputStates(exceptionMap, exceptionlist) + replacements(block) = newInstructions + } + } + + debuglog(s"method $m with ${m.blocks.size} reached fixpoint in $iterations iterations") + replacements.toMap + } + } +} diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 702071f906..757303e335 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -38,7 +38,7 @@ trait ScalaSettings extends AbsScalaSettings protected def futureSettings = List[BooleanSetting]() /** Enabled under -optimise. */ - protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce) + protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization) /** Internal use - syntax enhancements. */ private class EnableSettings[T <: BooleanSetting](val s: T) { @@ -128,6 +128,7 @@ trait ScalaSettings extends AbsScalaSettings val check = PhasesSetting ("-Ycheck", "Check the tree at the end of") val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after") val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.") + val YconstOptimization = BooleanSetting ("-Yconst-opt", "Perform optimization with constant values.") val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.") val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.") val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.") -- cgit v1.2.3 From 2fa2db784075dfb58cf507c45a948819ade8a6d4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 10:00:54 -0700 Subject: SI-7228, bug in weak subtyping. 
Another in the category of bugs which involve narrowing, widening, mediuming, dealiasing, weakening, normalizing, denormalizing, supernormalizing, subnormalizing, and double-bounded supersubnormalizing. This is probably not the ideal fix, but it is an improvement. --- .../scala/tools/nsc/typechecker/Implicits.scala | 23 +++---- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 4 +- .../scala/reflect/internal/tpe/TypeComparers.scala | 6 +- test/files/pos/t7228.scala | 75 ++++++++++++++++++++++ 5 files changed, 91 insertions(+), 19 deletions(-) create mode 100644 test/files/pos/t7228.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 2331f82a58..29d4c8423b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -268,7 +268,7 @@ trait Implicits { */ object Function1 { val Sym = FunctionClass(1) - def unapply(tp: Type) = tp match { + def unapply(tp: Type) = tp baseType Sym match { case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some((arg1, arg2)) case _ => None } @@ -431,10 +431,8 @@ trait Implicits { val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { - case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) => - matchesPtView(tp, arg1, arg2, undet) - case _ => - false + case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) + case _ => false } } if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start) @@ -576,20 +574,19 @@ trait Implicits { def fail(reason: String): SearchResult = failure(itree, reason) try { - val itree1 = - if (isView) { - val arg1 :: arg2 :: _ = pt.typeArgs + val itree1 = pt match { + case Function1(arg1, arg2) if isView => typed1( atPos(itree.pos)(Apply(itree, List(Ident("") setType approximate(arg1)))), EXPRmode, approximate(arg2) ) - } - else - typed1(itree, EXPRmode, wildPt) - - if (context.hasErrors) + case _ => typed1(itree, EXPRmode, wildPt) + } + if (context.hasErrors) { + log("implicit adapt failed: " + context.errBuffer.head.errMsg) return fail(context.errBuffer.head.errMsg) + } if (Statistics.canEnable) Statistics.incCounter(typedImplicits) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index eaf57cd39c..a110d6d15d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1133,7 +1133,7 @@ trait Typers extends Adaptations with Tags { return typedPos(tree.pos, mode, pt) { Block(List(tree), Literal(Constant())) } - } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) { + } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe.dealiasWiden, pt)) { if (settings.warnNumericWiden.value) context.unit.warning(tree.pos, "implicit numeric widening") return typedPos(tree.pos, mode, pt) { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a6c5367425..b59732e595 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1217,7 +1217,7 @@ trait Types protected def rewrap(newtp: Type): Type = NotNullType(newtp) override def isNotNull: Boolean = true override def notNull = this - override def deconst: Type = underlying //todo: 
needed? + override def deconst: Type = underlying.deconst //todo: needed? override def safeToString: String = underlying.toString + " with NotNull" override def kind = "NotNullType" } @@ -1989,7 +1989,7 @@ trait Types assert(underlying.typeSymbol != UnitClass) override def isTrivial: Boolean = true override def isNotNull = value.value != null - override def deconst: Type = underlying + override def deconst: Type = underlying.deconst override def safeToString: String = underlying.toString + "(" + value.escapedStringValue + ")" override def kind = "ConstantType" diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 82321f61c2..2d499cf299 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -583,7 +583,7 @@ trait TypeComparers { def isWeakSubType(tp1: Type, tp2: Type) = - tp1.deconst.normalize match { + tp1.widen.normalize match { case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => tp2.deconst.normalize match { case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => @@ -609,8 +609,8 @@ trait TypeComparers { * (Even if the calls are to typeSymbolDirect.) */ def isNumericSubType(tp1: Type, tp2: Type): Boolean = ( - isNumericValueType(tp1) - && isNumericValueType(tp2) + isNumericValueType(tp1.dealiasWiden) + && isNumericValueType(tp2.dealias) && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol) ) diff --git a/test/files/pos/t7228.scala b/test/files/pos/t7228.scala new file mode 100644 index 0000000000..5d936f6529 --- /dev/null +++ b/test/files/pos/t7228.scala @@ -0,0 +1,75 @@ +object AdaptWithWeaklyConformantType { + implicit class D(d: Double) { def double = d*2 } + + val x1: Int = 1 + var x2: Int = 2 + val x3 = 3 + var x4 = 4 + final val x5 = 5 + final var x6 = 6 + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} + +object AdaptAliasWithWeaklyConformantType { + implicit class D(d: Double) { def double = d*2 } + type T = Int + + val x1: T = 1 + var x2: T = 2 + val x3 = (3: T) + var x4 = (4: T) + final val x5 = (5: T) + final var x6 = (6: T) + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} + +object AdaptToAliasWithWeaklyConformantType { + type U = Double + implicit class D(d: U) { def double = d*2 } + + val x1: Int = 1 + var x2: Int = 2 + val x3 = (3: Int) + var x4 = (4: Int) + final val x5 = (5: Int) + final var x6 = (6: Int) + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} + +object AdaptAliasToAliasWithWeaklyConformantType { + type U = Double + type T = Int + implicit class D(d: U) { def double = d*2 } + + val x1: T = 1 + var x2: T = 2 + val x3 = (3: T) + var x4 = (4: T) + final val x5 = (5: T) + final var x6 = (6: T) + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} -- cgit v1.2.3 From cb02c96bed1454e1c0702c529366f3c40d6bffd9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 10:00:54 -0700 Subject: Simplified the widening logic. Should speak for itself. Whenever someone changed @switch from an error to a warning, it broke all the tests which depended on the error. I added -Xfatal-warnings to a couple which needed it. 
And one of those tests was then failing, as it must now since we couldn't get away with what was being attempted, so I moved it to pending. --- .../scala/tools/nsc/typechecker/Namers.scala | 30 ++++++++++------------ test/files/pos/no-widen-locals.scala | 19 -------------- test/files/pos/switch-small.flags | 1 + test/pending/pos/no-widen-locals.flags | 1 + test/pending/pos/no-widen-locals.scala | 19 ++++++++++++++ 5 files changed, 34 insertions(+), 36 deletions(-) delete mode 100644 test/files/pos/no-widen-locals.scala create mode 100644 test/files/pos/switch-small.flags create mode 100644 test/pending/pos/no-widen-locals.flags create mode 100644 test/pending/pos/no-widen-locals.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 007c7c6a83..d5da4967be 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -805,23 +805,19 @@ trait Namers extends MethodSynthesis { case _ => false } - - val tpe1 = dropIllegalStarTypes(tpe.deconst) - val tpe2 = tpe1.widen - - // This infers Foo.type instead of "object Foo" - // See Infer#adjustTypeArgs for the polymorphic case. - if (tpe.typeSymbolDirect.isModuleClass) tpe1 - else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag) - if (tpe2 <:< pt) tpe2 else tpe1 - else if (isHidden(tpe)) tpe2 - // In an attempt to make pattern matches involving method local vals - // compilable into switches, for a time I had a more generous condition: - // `if (sym.isFinal || sym.isLocal) tpe else tpe1` - // This led to issues with expressions like classOf[List[_]] which apparently - // depend on being deconst-ed here, so this is again the original: - else if (!sym.isFinal) tpe1 - else tpe + val shouldWiden = ( + !tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo" + && (tpe.widen <:< pt) // Don't widen our way out of conforming to pt + && ( sym.isVariable + || sym.isMethod && !sym.hasAccessorFlag + || isHidden(tpe) + ) + ) + dropIllegalStarTypes( + if (shouldWiden) tpe.widen + else if (sym.isFinal) tpe // "final val" allowed to retain constant type + else tpe.deconst + ) } /** Computes the type of the body in a ValDef or DefDef, and * assigns the type to the tpt's node. Returns the type. diff --git a/test/files/pos/no-widen-locals.scala b/test/files/pos/no-widen-locals.scala deleted file mode 100644 index 013e63f0a2..0000000000 --- a/test/files/pos/no-widen-locals.scala +++ /dev/null @@ -1,19 +0,0 @@ -// Worked from r23262 until that was reverted somewhere -// around r25016. 
-import annotation.switch - -object Test { - def f(x: Int) = { - val X1 = 5 - val X2 = 10 - val X3 = 15 - val X4 = 20 - - (x: @switch) match { - case X1 => 1 - case X2 => 2 - case X3 => 3 - case X4 => 4 - } - } -} diff --git a/test/files/pos/switch-small.flags b/test/files/pos/switch-small.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/pos/switch-small.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/pending/pos/no-widen-locals.flags b/test/pending/pos/no-widen-locals.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/pending/pos/no-widen-locals.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/pending/pos/no-widen-locals.scala b/test/pending/pos/no-widen-locals.scala new file mode 100644 index 0000000000..013e63f0a2 --- /dev/null +++ b/test/pending/pos/no-widen-locals.scala @@ -0,0 +1,19 @@ +// Worked from r23262 until that was reverted somewhere +// around r25016. +import annotation.switch + +object Test { + def f(x: Int) = { + val X1 = 5 + val X2 = 10 + val X3 = 15 + val X4 = 20 + + (x: @switch) match { + case X1 => 1 + case X2 => 2 + case X3 => 3 + case X4 => 4 + } + } +} -- cgit v1.2.3 From 9c5ea96b1c0fa45037a96e530b6ae71687a292d1 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 10:11:28 -0700 Subject: Moved some numeric subtyping logic closer to center. Fixed bug in numeric widening related to continuations, which enabled simplifying isNumericSubType. --- .../scala/tools/nsc/typechecker/Typers.scala | 9 ++++--- .../scala/reflect/internal/Definitions.scala | 2 +- .../scala/reflect/internal/tpe/TypeComparers.scala | 31 +++++++++++++--------- 3 files changed, 25 insertions(+), 17 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a110d6d15d..c19d6b7a56 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1124,16 +1124,19 @@ trait Typers extends Adaptations with Tags { else { if (mode.inExprModeButNot(FUNmode)) { pt.dealias match { - case TypeRef(_, sym, _) => + // The <: Any requirement inhibits attempts to adapt continuation types + // to non-continuation types. + case TypeRef(_, sym, _) if tree.tpe <:< AnyClass.tpe => // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially // infinite expansion if pt is constant type () - if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12) + if (sym == UnitClass) { // (12) if (settings.warnValueDiscard.value) context.unit.warning(tree.pos, "discarded non-Unit value") return typedPos(tree.pos, mode, pt) { Block(List(tree), Literal(Constant())) } - } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe.dealiasWiden, pt)) { + } + else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) { if (settings.warnNumericWiden.value) context.unit.warning(tree.pos, "implicit numeric widening") return typedPos(tree.pos, mode, pt) { diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index fe5a5c81e2..bfba81c654 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1131,7 +1131,7 @@ trait Definitions extends api.StandardDefinitions { /** Is type's symbol a numeric value class? 
*/ def isNumericValueType(tp: Type): Boolean = tp match { case TypeRef(_, sym, _) => isNumericValueClass(sym) - case _ => false + case _ => false } // todo: reconcile with javaSignature!!! diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 2d499cf299..a03ab1610e 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -5,6 +5,7 @@ package tpe import scala.collection.{ mutable } import Flags._ import util.Statistics +import scala.annotation.tailrec trait TypeComparers { self: SymbolTable => @@ -583,9 +584,9 @@ trait TypeComparers { def isWeakSubType(tp1: Type, tp2: Type) = - tp1.widen.normalize match { + tp1.dealiasWiden match { case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => - tp2.deconst.normalize match { + tp2.deconst.dealias match { case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => isNumericSubClass(sym1, sym2) case tv2 @ TypeVar(_, _) => @@ -594,7 +595,7 @@ trait TypeComparers { isSubType(tp1, tp2) } case tv1 @ TypeVar(_, _) => - tp2.deconst.normalize match { + tp2.deconst.dealias match { case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true) case _ => @@ -604,14 +605,18 @@ trait TypeComparers { isSubType(tp1, tp2) } - /** The isNumericValueType tests appear redundant, but without them - * test/continuations-neg/function3.scala goes into an infinite loop. - * (Even if the calls are to typeSymbolDirect.) - */ - def isNumericSubType(tp1: Type, tp2: Type): Boolean = ( - isNumericValueType(tp1.dealiasWiden) - && isNumericValueType(tp2.dealias) - && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol) - ) - + def isNumericSubType(tp1: Type, tp2: Type) = ( + isNumericSubClass(primitiveBaseClass(tp1.dealiasWiden), primitiveBaseClass(tp2.dealias)) + ) + + /** If the given type has a primitive class among its base classes, + * the symbol of that class. Otherwise, NoSymbol. + */ + private def primitiveBaseClass(tp: Type): Symbol = { + @tailrec def loop(bases: List[Symbol]): Symbol = bases match { + case Nil => NoSymbol + case x :: xs => if (isPrimitiveValueClass(x)) x else loop(xs) + } + loop(tp.baseClasses) + } } -- cgit v1.2.3 From 34faa0d073a8613deebffe7605fd8a5e9a93afbc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 Mar 2013 18:35:28 +0100 Subject: SI-6601 Close access loophole for value class constructors ExtensionMethods marks private constructors of value classes as notPRIVATE before pickling. When the pickler reads the flags of this symbol, the anti-shift mechanism folds this into the regular PRIVATE flag, so the class is pickled as though it was public all along. A seprately compiled client can then call this constructor. To remedy this, we must: - pickle `rawFlags`, rather than `flags`. This is symmetric with unpickling, which sets `rawFlags` with the value it reads. - Add `notPRIVATE` to the flagset `PickledFlags`. We cannot make this change in a minor version, as the pickler and unpickler must agree on `PickledFlags`. I believe that this won't change the size of pickled flags for the majority of symbols (ie, those without the notPRIVATE flag) due to the variable length encoding in `writeLongNat`. This also improves the situation for SI-6608. 
Reflection and scalap (and, by extension, IntelliJ), no longer will see as public methods that have had their access widened in SuperAccessors (which is done selectively to support inlining under separate compilation.) --- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 ++ src/reflect/scala/reflect/internal/Flags.scala | 6 +++++- test/files/neg/t6601.check | 4 ++++ test/files/neg/t6601/AccessPrivateConstructor_2.scala | 3 +++ test/files/neg/t6601/PrivateConstructor_1.scala | 1 + test/files/run/t6608.check | 1 + test/files/run/t6608.scala | 16 ++++++++++++++++ 8 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t6601.check create mode 100644 test/files/neg/t6601/AccessPrivateConstructor_2.scala create mode 100644 test/files/neg/t6601/PrivateConstructor_1.scala create mode 100644 test/files/run/t6608.check create mode 100644 test/files/run/t6608.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 140be0e17b..9b33ae8ba1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -528,7 +528,7 @@ abstract class Pickler extends SubComponent { private def writeSymInfo(sym: Symbol) { writeRef(sym.name) writeRef(localizedOwner(sym)) - writeLongNat((rawToPickledFlags(sym.flags & PickledFlags))) + writeLongNat((rawToPickledFlags(sym.rawflags & PickledFlags))) if (sym.hasAccessBoundary) writeRef(sym.privateWithin) writeRef(sym.info) } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b7221a78ec..b32fc6b977 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1553,6 +1553,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans val bridges = addVarargBridges(currentOwner) checkAllOverrides(currentOwner) checkAnyValSubclass(currentOwner) + if (currentOwner.isDerivedValueClass) + currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler! 
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc") diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 1987f34474..fe46a0471e 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -307,7 +307,11 @@ class Flags extends ModifierFlags { assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled)) /** These flags are pickled */ - final val PickledFlags = InitialFlags & ~FlagsNotPickled + final val PickledFlags = ( + (InitialFlags & ~FlagsNotPickled) + | notPRIVATE // for value class constructors (SI-6601), and private members referenced + // in @inline-marked methods publicized in SuperAccessors (see SI-6608, e6b4204604) + ) /** If we have a top-level class or module * and someone asks us for a flag not in TopLevelPickledFlags, diff --git a/test/files/neg/t6601.check b/test/files/neg/t6601.check new file mode 100644 index 0000000000..1410e1b11a --- /dev/null +++ b/test/files/neg/t6601.check @@ -0,0 +1,4 @@ +AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor + new PrivateConstructor("") // Scalac should forbid accessing to the private constructor! + ^ +one error found diff --git a/test/files/neg/t6601/AccessPrivateConstructor_2.scala b/test/files/neg/t6601/AccessPrivateConstructor_2.scala new file mode 100644 index 0000000000..816bc10d79 --- /dev/null +++ b/test/files/neg/t6601/AccessPrivateConstructor_2.scala @@ -0,0 +1,3 @@ +class AccessPrivateConstructor { + new PrivateConstructor("") // Scalac should forbid accessing to the private constructor! +} diff --git a/test/files/neg/t6601/PrivateConstructor_1.scala b/test/files/neg/t6601/PrivateConstructor_1.scala new file mode 100644 index 0000000000..f09d7ad068 --- /dev/null +++ b/test/files/neg/t6601/PrivateConstructor_1.scala @@ -0,0 +1 @@ +class PrivateConstructor private(val s: String) extends AnyVal diff --git a/test/files/run/t6608.check b/test/files/run/t6608.check new file mode 100644 index 0000000000..15628b322e --- /dev/null +++ b/test/files/run/t6608.check @@ -0,0 +1 @@ +(C$$yyy,true) diff --git a/test/files/run/t6608.scala b/test/files/run/t6608.scala new file mode 100644 index 0000000000..2f956bfb35 --- /dev/null +++ b/test/files/run/t6608.scala @@ -0,0 +1,16 @@ +import reflect.runtime.universe + +class C { + private val yyy: Any = 1 + @inline def foo = yyy +} + +object Test extends App { + import universe._ + val access = typeOf[C].declarations + .toList + .filter(_.name.toString.endsWith("yyy")) + .map(x => (x.name, x.isPrivate)) + println(access.head) +} + -- cgit v1.2.3 From 9fed30cb9c6d86ca07286febeb44bf635cb23650 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 11 Mar 2013 22:11:19 -0700 Subject: Warn about forgotten string interpolators. In the compiler sources this arrives with a number of false positives, because we frequently work with strings containing $foo where foo is an in-scope identifier. I think in normal source code this will be less of a problem, or none at all; but to be conservative the warning is born under -Xlint. 
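For example (this is exactly what the new forgot-interpolator test exercises),
under -Xlint the typer now warns on:

    class A {
      val bippy = 123
      def f = "Put the $bippy in the $bippy!"  // warning: looks like an interpolated
                                               // String; did you forget the interpolator?
    }

because `bippy` resolves to an identifier that is in scope at the literal.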
--- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 16 ++++++++++++++++ test/files/neg/forgot-interpolator.check | 9 +++++++++ test/files/neg/forgot-interpolator.flags | 1 + test/files/neg/forgot-interpolator.scala | 15 +++++++++++++++ 5 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/forgot-interpolator.check create mode 100644 test/files/neg/forgot-interpolator.flags create mode 100644 test/files/neg/forgot-interpolator.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 91ebd798e1..2dbfa1d0d3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -189,7 +189,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int] - if (versionFormat != pickleVersionFormat) throw new Error("macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat") + if (versionFormat != pickleVersionFormat) throw new Error(s"macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat") val className = payload("className").asInstanceOf[String] val methodName = payload("methodName").asInstanceOf[String] diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index eaf57cd39c..33f1ed3386 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -86,6 +86,9 @@ trait Typers extends Adaptations with Tags { // that are turned private by typedBlock private final val SYNTHETIC_PRIVATE = TRANS_FLAG + private final val InterpolatorCodeRegex = """\$\{.*?\}""".r + private final val InterpolatorIdentRegex = """\$\w+""".r + // To enable decent error messages when the typer crashes. // TODO - this only catches trees which go through def typed, // but there are all kinds of back ways - typedClassDef, etc. etc. @@ -5151,6 +5154,19 @@ trait Typers extends Adaptations with Tags { def typedLiteral(tree: Literal) = { val value = tree.value + // Warn about likely interpolated strings which are missing their interpolators + if (settings.lint.value) value match { + case Constant(s: String) => + def names = InterpolatorIdentRegex findAllIn s map (n => newTermName(n stripPrefix "$")) + val shouldWarn = ( + (InterpolatorCodeRegex findFirstIn s).nonEmpty + || (names exists (n => context.lookupSymbol(n, _ => true).symbol.exists)) + ) + if (shouldWarn) + unit.warning(tree.pos, "looks like an interpolated String; did you forget the interpolator?") + case _ => + } + tree setType ( if (value.tag == UnitTag) UnitClass.tpe else ConstantType(value)) diff --git a/test/files/neg/forgot-interpolator.check b/test/files/neg/forgot-interpolator.check new file mode 100644 index 0000000000..f6de4d7b3a --- /dev/null +++ b/test/files/neg/forgot-interpolator.check @@ -0,0 +1,9 @@ +forgot-interpolator.scala:4: warning: looks like an interpolated String; did you forget the interpolator? + def f = "Put the $bippy in the $bippy!" // warn + ^ +forgot-interpolator.scala:14: warning: looks like an interpolated String; did you forget the interpolator? 
+ def f = """Put the ${println("bippy")} in the bippy!""" // warn + ^ +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/forgot-interpolator.flags b/test/files/neg/forgot-interpolator.flags new file mode 100644 index 0000000000..7949c2afa2 --- /dev/null +++ b/test/files/neg/forgot-interpolator.flags @@ -0,0 +1 @@ +-Xlint -Xfatal-warnings diff --git a/test/files/neg/forgot-interpolator.scala b/test/files/neg/forgot-interpolator.scala new file mode 100644 index 0000000000..d67db82643 --- /dev/null +++ b/test/files/neg/forgot-interpolator.scala @@ -0,0 +1,15 @@ +class A { + val bippy = 123 + + def f = "Put the $bippy in the $bippy!" // warn +} + +class B { + val dingus = 123 + + def f = "Put the $bippy in the $bippy!" // no warn +} + +class C { + def f = """Put the ${println("bippy")} in the bippy!""" // warn +} -- cgit v1.2.3 From fc5e5581ec2eb91d22cbc8a2f19729c7c9a87254 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 11 Mar 2013 22:11:20 -0700 Subject: Eliminate a bunch of -Xlint warnings. Mostly unused private code, unused imports, and points where an extra pair of parentheses is necessary for scalac to have confidence in our intentions. --- src/compiler/scala/tools/nsc/Global.scala | 2 -- src/compiler/scala/tools/nsc/ast/Positions.scala | 2 -- src/compiler/scala/tools/nsc/ast/Trees.scala | 5 ++- .../scala/tools/nsc/ast/parser/Parsers.scala | 16 ++++----- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 10 +++--- .../scala/tools/nsc/backend/JavaPlatform.scala | 2 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 42 +++++++++++----------- .../scala/tools/nsc/backend/jvm/GenASM.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenJVMASM.scala | 1 - .../nsc/backend/opt/DeadCodeElimination.scala | 4 +-- .../scala/tools/nsc/settings/MutableSettings.scala | 8 ++--- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- .../scala/tools/nsc/transform/Constructors.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 1 - .../tools/nsc/transform/SpecializeTypes.scala | 2 +- .../scala/tools/nsc/transform/UnCurry.scala | 4 +-- .../scala/tools/nsc/transform/patmat/Logic.scala | 11 +++--- .../tools/nsc/transform/patmat/MatchAnalysis.scala | 24 ++++++------- .../tools/nsc/transform/patmat/MatchCodeGen.scala | 10 ++---- .../nsc/transform/patmat/MatchOptimization.scala | 13 ++----- .../nsc/transform/patmat/MatchTranslation.scala | 2 +- .../nsc/transform/patmat/MatchTreeMaking.scala | 36 ++++++++----------- .../scala/tools/nsc/transform/patmat/Solving.scala | 7 ---- .../tools/nsc/typechecker/ContextErrors.scala | 1 - .../scala/tools/nsc/typechecker/Contexts.scala | 20 +++++------ .../scala/tools/nsc/typechecker/Duplicators.scala | 2 +- .../scala/tools/nsc/typechecker/Implicits.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 4 +-- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 1 - .../scala/tools/nsc/typechecker/Typers.scala | 9 +++-- src/compiler/scala/tools/reflect/FastTrack.scala | 2 +- .../scala/tools/reflect/ToolBoxFactory.scala | 6 ++-- .../tools/nsc/interactive/CompilerControl.scala | 2 +- src/reflect/scala/reflect/api/Trees.scala | 4 +-- src/reflect/scala/reflect/internal/TreeGen.scala | 2 +- .../scala/tools/nsc/interpreter/ExprTyper.scala | 1 - .../tools/nsc/interpreter/MemberHandlers.scala | 1 - .../tools/nsc/doc/base/CommentFactoryBase.scala | 1 - src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala | 2 -- 
.../tools/nsc/doc/base/MemberLookupBase.scala | 2 +- .../scala/tools/nsc/doc/model/ModelFactory.scala | 1 - .../doc/model/ModelFactoryImplicitSupport.scala | 2 +- 43 files changed, 113 insertions(+), 162 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 7ee3ee551f..a8e3c29ff9 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -795,8 +795,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } reverse } - private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name) - // ------------ Invalidations --------------------------------- /** Is given package class a system package class that cannot be invalidated? diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index 63a2dd0ee7..66d75969e9 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -1,8 +1,6 @@ package scala.tools.nsc package ast -import scala.reflect.internal.util.{ SourceFile, OffsetPosition } - trait Positions extends scala.reflect.internal.Positions { self: Global => diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 6c5c087d55..8391ebdafc 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -100,12 +100,11 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => if (body forall treeInfo.isInterfaceMember) List() else List( atPos(wrappingPos(superPos, lvdefs)) ( - DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant()))))) + DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant(())))))) } else { // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit) vparamss1 = List() :: vparamss1 - val superRef: Tree = atPos(superPos)(gen.mkSuperInitCall) val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass // this requires knowing which of the parents is a type macro and which is not // and that's something that cannot be found out before typer @@ -116,7 +115,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)` // is it going to be a problem that we can no longer include the `argss`? 
atPos(wrappingPos(superPos, lvdefs)) ( - DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant()))))) + DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant(())))))) } } constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false)) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 9218ad3330..bd7881996c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -57,7 +57,7 @@ trait ParsersCommon extends ScannersCommon { if (in.token == LPAREN) inParens(body) else { accept(LPAREN) ; alt } - @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant())) + @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant(()))) @inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil) @inline final def inBraces[T](body: => T): T = { @@ -71,7 +71,7 @@ trait ParsersCommon extends ScannersCommon { else { accept(LBRACE) ; alt } @inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil) - @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant())) + @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant(()))) @inline final def dropAnyBraces[T](body: => T): T = if (in.token == LBRACE) inBraces(body) else body @@ -1249,7 +1249,7 @@ self => newLinesOpt() val thenp = expr() val elsep = if (in.token == ELSE) { in.nextToken(); expr() } - else Literal(Constant()) + else Literal(Constant(())) If(cond, thenp, elsep) } parseIf @@ -1323,7 +1323,7 @@ self => case RETURN => def parseReturn = atPos(in.skipToken()) { - Return(if (isExprIntro) expr() else Literal(Constant())) + Return(if (isExprIntro) expr() else Literal(Constant(()))) } parseReturn case THROW => @@ -2524,7 +2524,7 @@ self => */ def constrExpr(vparamss: List[List[ValDef]]): Tree = if (in.token == LBRACE) constrBlock(vparamss) - else Block(List(selfInvocation(vparamss)), Literal(Constant())) + else Block(List(selfInvocation(vparamss)), Literal(Constant(()))) /** {{{ * SelfInvocation ::= this ArgumentExprs {ArgumentExprs} @@ -2554,7 +2554,7 @@ self => else Nil } accept(RBRACE) - Block(stats, Literal(Constant())) + Block(stats, Literal(Constant(()))) } /** {{{ @@ -2760,7 +2760,7 @@ self => def anyvalConstructor() = ( // Not a well-formed constructor, has to be finished later - see note // regarding AnyVal constructor in AddInterfaces. 
- DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant()))) + DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant(())))) ) val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart @@ -2986,7 +2986,7 @@ self => else List(tmplDef(pos, mods)) in.token match { - case RBRACE | CASE => defs :+ (Literal(Constant()) setPos o2p(in.offset)) + case RBRACE | CASE => defs :+ (Literal(Constant(())) setPos o2p(in.offset)) case _ => defs } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index d70b1f4d9c..598bd82697 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -131,7 +131,7 @@ abstract class TreeBuilder { } def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { - case Nil => Literal(Constant()) + case Nil => Literal(Constant(())) case List(tree) if flattenUnary => tree case _ => makeTuple(trees, isType = false) } @@ -250,21 +250,21 @@ abstract class TreeBuilder { /** Create tree representing a while loop */ def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = { val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) } - val rhs = If(cond, Block(List(body), continu), Literal(Constant())) + val rhs = If(cond, Block(List(body), continu), Literal(Constant(()))) LabelDef(lname, Nil, rhs) } /** Create tree representing a do-while loop */ def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = { val continu = Apply(Ident(lname), Nil) - val rhs = Block(List(body), If(cond, continu, Literal(Constant()))) + val rhs = Block(List(body), If(cond, continu, Literal(Constant(())))) LabelDef(lname, Nil, rhs) } /** Create block of statements `stats` */ def makeBlock(stats: List[Tree]): Tree = - if (stats.isEmpty) Literal(Constant()) - else if (!stats.last.isTerm) Block(stats, Literal(Constant())) + if (stats.isEmpty) Literal(Constant(())) + else if (!stats.last.isTerm) Block(stats, Literal(Constant(()))) else if (stats.length == 1) stats.head else Block(stats.init, stats.last) diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index 08602f87dc..00f2933fab 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -7,7 +7,7 @@ package scala.tools.nsc package backend import io.AbstractFile -import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath} +import util.{ClassPath,MergedClassPath,DeltaClassPath} import scala.tools.util.PathResolver trait JavaPlatform extends Platform { diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 94116d6783..793effdc59 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -276,8 +276,8 @@ abstract class GenICode extends SubComponent { generatedType = elem ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos) // it's tempting to just drop array loads of type Null instead - // of adapting them but array accesses can cause - // ArrayIndexOutOfBounds so we can't. Besides, Array[Null] + // of adapting them but array accesses can cause + // ArrayIndexOutOfBounds so we can't. 
Besides, Array[Null] // probably isn't common enough to figure out an optimization adaptNullRef(generatedType, expectedType, ctx1, tree.pos) } @@ -479,11 +479,11 @@ abstract class GenICode extends SubComponent { val resCtx: Context = tree match { case LabelDef(name, params, rhs) => def genLoadLabelDef = { - val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because + val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because // label defs can be the target of jumps from other locations. // that means label defs can lead to unreachable code without // proper reachability analysis - + if (nme.isLoopHeaderLabel(name)) ctx1.bb.loopHeader = true @@ -1018,7 +1018,7 @@ abstract class GenICode extends SubComponent { resCtx } - + /** * If we have a method call, field load, or array element load of type Null then * we need to convince the JVM that we have a null value because in Scala @@ -1030,9 +1030,9 @@ abstract class GenICode extends SubComponent { */ private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) { log(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)") - + // Don't need to adapt null to unit because we'll just drop it anyway. Don't - // need to adapt to Object or AnyRef because the JVM is happy with + // need to adapt to Object or AnyRef because the JVM is happy with // upcasting Null to them. // We do have to adapt from NullReference to NullReference because we could be storing // this value into a local of type Null and we want the JVM to see that it's @@ -1066,12 +1066,12 @@ abstract class GenICode extends SubComponent { ctx.bb.enterIgnoreMode() case _ if from isAssignabledTo to => () - case (_, UNIT) => + case (_, UNIT) => ctx.bb.emit(DROP(from), pos) // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem - case _ if !from.isRefOrArrayType && !to.isRefOrArrayType => + case _ if !from.isRefOrArrayType && !to.isRefOrArrayType => coerce(from, to) - case _ => + case _ => assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos") } } @@ -1356,10 +1356,10 @@ abstract class GenICode extends SubComponent { ctx: Context, thenCtx: Context, elseCtx: Context): Boolean = - { + { /** * Generate the de-sugared comparison mechanism that will underly an '==' - * + * * @param l left-hand side of the '==' * @param r right-hand side of the '==' * @param code the comparison operator to use @@ -1961,34 +1961,34 @@ abstract class GenICode extends SubComponent { * }), (AnotherExceptionClass, * ctx => {... * } ))` - * + * * The resulting structure will look something like - * + * * outer: * // this 'useless' jump will be removed later, * // for now it separates the try body's blocks from previous * // code since the try body needs its own exception handlers * JUMP body - * + * * body: * [ try body ] * JUMP normalExit - * + * * catch[i]: * [ handler[i] body ] * JUMP normalExit - * + * * catchAll: * STORE exception * [ finally body ] * THROW exception - * + * * normalExit: * [ finally body ] - * + * * each catch[i] will cover body. catchAll will cover both body and each catch[i] * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers. - * + * * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally. * Later reachability analysis will remove unreacahble code. 
@@ -2047,7 +2047,7 @@ abstract class GenICode extends SubComponent { exhEndCtx.bb.enterIgnoreMode() finalizerCtx.endHandler() } - + // Generate each exception handler for ((sym, kind, handler) <- handlers) { val exh = this.newExceptionHandler(sym, tree.pos) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 4a3d1805d9..8d9c4290ce 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2634,7 +2634,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if(from == to) { return } // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) - assert(from != BOOL && to != BOOL, "inconvertible types : $from -> $to") + assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala index 50fd59b23f..2ad474cf3f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala @@ -67,7 +67,6 @@ trait GenJVMASM { // Before erasure so we can identify generic mains. enteringErasure { val companion = sym.linkedClassOfClass - val companionMain = companion.tpe.member(nme.main) if (hasJavaMainMethod(companion)) failNoForwarder("companion contains its own main method") diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index a9c8104e79..7042d7a042 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -218,7 +218,7 @@ abstract class DeadCodeElimination extends SubComponent { // worklist so we also mark their reaching defs as useful - see SI-7060 if (!useful(bb)(idx)) { useful(bb) += idx - dropOf.get(bb, idx) foreach { + dropOf.get((bb, idx)) foreach { for ((bb1, idx1) <- _) { /* * SI-7060: A drop that we now mark as useful can be reached via several paths, @@ -346,7 +346,7 @@ abstract class DeadCodeElimination extends SubComponent { if (useful(bb)(idx)) { debuglog(" * " + i + " is useful") bb.emit(i, i.pos) - compensations.get(bb, idx) match { + compensations.get((bb, idx)) match { case Some(is) => is foreach bb.emit case None => () } diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 5fa3594128..0c72ce1d8e 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -486,8 +486,6 @@ class MutableSettings(val errorFn: String => Unit) descr: String, default: ScalaVersion) extends Setting(name, descr) { - import ScalaVersion._ - type T = ScalaVersion protected var v: T = NoScalaVersion @@ -495,14 +493,14 @@ class MutableSettings(val errorFn: String => Unit) value = default Some(args) } - + override def tryToSetColon(args: List[String]) = args match { case Nil => value = default; Some(Nil) case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs) } - + override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s)) - + def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}") withHelpSyntax(s"${name}:<${arg}>") diff 
--git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 5fbc15f858..21fef02e6d 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -276,7 +276,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => */ private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] = if (treeInfo.firstConstructor(stats) != EmptyTree) stats - else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant()))) :: stats + else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant(())))) :: stats private def implTemplate(clazz: Symbol, templ: Template): Template = atPos(templ.pos) { val templ1 = ( diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index a4a6c3ff31..395645161f 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -366,7 +366,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { CODE.NOT ( Apply(gen.mkAttributedRef(specializedFlag), List())), List()), - Block(stats, Literal(Constant())), + Block(stats, Literal(Constant(()))), EmptyTree) List(localTyper.typed(tree)) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 55b9ce1be9..348a18de0e 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -21,7 +21,6 @@ abstract class Erasure extends AddInterfaces import global._ import definitions._ import CODE._ - import treeInfo._ val phaseName: String = "erasure" diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0cd7f516ef..1998348a70 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1527,7 +1527,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (symbol.isConstructor) { val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, vparamss, symbol.owner)) if (symbol.isPrimaryConstructor) - localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant())))) + localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(()))))) else // duplicate the original constructor reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef) } diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 94ca1206b9..e4205f0635 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -550,8 +550,6 @@ abstract class UnCurry extends InfoTransform false } - def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty - tree match { /* Some uncurry post transformations add members to templates. * @@ -701,7 +699,7 @@ abstract class UnCurry extends InfoTransform // update the type of the method after uncurry. 
dd.symbol updateInfo { val GenPolyType(tparams, tp) = dd.symbol.info - logResult("erased dependent param types for ${dd.symbol.info}") { + logResult(s"erased dependent param types for ${dd.symbol.info}") { GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType)) } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 69d9987b05..93abfb5280 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -6,7 +6,6 @@ package scala.tools.nsc.transform.patmat -import scala.tools.nsc.symtab._ import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Statistics @@ -497,11 +496,11 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { uniques.get(tp).getOrElse( uniques.find {case (oldTp, oldC) => oldTp =:= tp} match { case Some((_, c)) => - debug.patmat("unique const: "+ (tp, c)) + debug.patmat("unique const: "+ ((tp, c))) c case _ => val fresh = mkFresh - debug.patmat("uniqued const: "+ (tp, fresh)) + debug.patmat("uniqued const: "+ ((tp, fresh))) uniques(tp) = fresh fresh }) @@ -517,12 +516,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { if (!t.symbol.isStable) t.tpe.narrow else trees find (a => a.correspondsStructure(t)(sameValue)) match { case Some(orig) => - debug.patmat("unique tp for tree: "+ (orig, orig.tpe)) + debug.patmat("unique tp for tree: "+ ((orig, orig.tpe))) orig.tpe case _ => // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?) val treeWithNarrowedType = t.duplicate setType t.tpe.narrow - debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe)) + debug.patmat("uniqued: "+ ((t, t.tpe, treeWithNarrowedType.tpe))) trees += treeWithNarrowedType treeWithNarrowedType.tpe } @@ -623,4 +622,4 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { override def toString = "null" } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 3ee75df6c4..006c7bd85f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -12,14 +12,10 @@ import scala.reflect.internal.util.Statistics import scala.reflect.internal.util.Position trait TreeAndTypeAnalysis extends Debugging { - import global.{Tree, Type, Symbol, definitions, analyzer, - ConstantType, Literal, Constant, appliedType, WildcardType, TypeRef, ModuleClassSymbol, - nestedMemberType, TypeMap, Ident} - + import global._ import definitions._ import analyzer.Typer - // we use subtyping as a model for implication between instanceof tests // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T] // unfortunately this is not true in general: @@ -60,7 +56,7 @@ trait TreeAndTypeAnalysis extends Debugging { Some(List(tp)) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... 
} sees no Byte case sym if !sym.isSealed || isPrimitiveValueClass(sym) => - debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym))) + debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))) None case sym => val subclasses = ( @@ -68,7 +64,7 @@ trait TreeAndTypeAnalysis extends Debugging { // symbols which are both sealed and abstract need not be covered themselves, because // all of their children must be and they cannot otherwise be created. filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))) - debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses)) + debug.patmat("enum sealed -- subclasses: "+ ((sym, subclasses))) val tpApprox = typer.infer.approximateAbstracts(tp) val pre = tpApprox.prefix @@ -86,7 +82,7 @@ trait TreeAndTypeAnalysis extends Debugging { if (subTpApprox <:< tpApprox) Some(checkableType(subTp)) else None }) - debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes) + debug.patmat("enum sealed "+ ((tp, tpApprox)) + " as "+ validSubTypes) Some(validSubTypes) } @@ -108,7 +104,7 @@ trait TreeAndTypeAnalysis extends Debugging { } val res = typeArgsToWildcardsExceptArray(tp) - debug.patmat("checkable "+(tp, res)) + debug.patmat("checkable "+((tp, res))) res } @@ -126,8 +122,8 @@ trait TreeAndTypeAnalysis extends Debugging { } trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking { - import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope} - import global.definitions.{ListClass, NilModule} + import global._ + import global.definitions._ /** * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types) @@ -344,8 +340,8 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT trait MatchAnalysis extends MatchApproximation { import PatternMatchingStats._ - import global.{Tree, Type, Symbol, NoSymbol, Ident, Select} - import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol} + import global._ + import global.definitions._ trait MatchAnalyzer extends MatchApproximator { def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg) @@ -636,7 +632,7 @@ trait MatchAnalysis extends MatchApproximation { def toCounterExample(beBrief: Boolean = false): CounterExample = if (!allFieldAssignmentsLegal) NoExample else { - debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal)) + debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))) val res = prunedEqualTo match { // a definite assignment to a value case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index 416bdf50f0..889615a39f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -10,7 +10,6 @@ import scala.tools.nsc.symtab.Flags.SYNTHETIC import scala.language.postfixOps import scala.reflect.internal.util.Statistics import scala.reflect.internal.util.Position -import scala.reflect.internal.util.NoPosition /** Factory methods used by TreeMakers to make the actual trees. 
* @@ -18,10 +17,7 @@ import scala.reflect.internal.util.NoPosition * and pure (aka "virtualized": match is parametric in its monad). */ trait MatchCodeGen extends Interface { - import PatternMatchingStats._ - import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol, - appliedType, NoType, MethodType, newTermName, Name, - Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef} + import global._ import definitions._ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -79,7 +75,7 @@ trait MatchCodeGen extends Interface { // duplicated out of frustration with cast generation def mkZero(tp: Type): Tree = { tp.typeSymbol match { - case UnitClass => Literal(Constant()) + case UnitClass => Literal(Constant(())) case BooleanClass => Literal(Constant(false)) case FloatClass => Literal(Constant(0.0f)) case DoubleClass => Literal(Constant(0.0d)) @@ -255,4 +251,4 @@ trait MatchCodeGen extends Interface { } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index dcf2413b15..125e9a3b65 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -11,7 +11,6 @@ import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Statistics import scala.reflect.internal.util.Position -import scala.reflect.internal.util.NoPosition /** Optimize and analyze matches based on their TreeMaker-representation. * @@ -20,15 +19,9 @@ import scala.reflect.internal.util.NoPosition * TODO: split out match analysis */ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { - import PatternMatchingStats._ - import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos, - ConstantType, Literal, Constant, gen, EmptyTree, - Typed, treeInfo, nme, Ident, - Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw} - + import global._ import global.definitions._ - //// trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator { /** a flow-sensitive, generalised, common sub-expression elimination @@ -160,7 +153,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen) atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) - override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution) + override def toString = "Memo"+((nextBinder.name, storedCond.name, cond, res, substitution)) } case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._ @@ -199,7 +192,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S) casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate) } - override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution) + override def toString = "R"+((lastReusedTreeMaker.storedCond.name, substitution)) } } diff --git 
a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 23b33e9be6..b05c1cd65c 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -243,7 +243,7 @@ trait MatchTranslation { self: PatternMatching => if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context) // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe) - debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType)) + debug.patmat("translateExtractorPattern checking parameter type: "+ ((patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))) // must use type `tp`, which is provided by extractor's result, not the type expected by binder, // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 202f3444f8..76268f3ecd 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -11,7 +11,6 @@ import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Statistics import scala.reflect.internal.util.Position -import scala.reflect.internal.util.NoPosition /** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen. * @@ -19,13 +18,8 @@ import scala.reflect.internal.util.NoPosition * mostly agnostic to whether we're in optimized/pure (virtualized) mode. 
*/ trait MatchTreeMaking extends MatchCodeGen with Debugging { - import PatternMatchingStats._ - import global.{Tree, Type, Symbol, CaseDef, atPos, settings, - Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest, - ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser, - Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme} - - import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass} + import global._ + import definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass} final case class Suppression(exhaustive: Boolean, unreachable: Boolean) object Suppression { @@ -60,7 +54,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = { if (currSub ne null) { - debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst)) + debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ ((this, currSub, outerSubst))) Thread.dumpStack() } else currSub = outerSubst >> substitution @@ -100,7 +94,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree) atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here - override def toString = "B"+(body, matchPt) + override def toString = "B"+((body, matchPt)) } case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker { @@ -220,7 +214,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { ) } - override def toString = "X"+(extractor, nextBinder.name) + override def toString = "X"+((extractor, nextBinder.name)) } /** @@ -274,7 +268,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { } } - override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution) + override def toString = "P"+((prevBinder.name, extraCond getOrElse "", localSubstitution)) } object IrrefutableExtractorTreeMaker { @@ -389,7 +383,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { **/ case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker { import TypeTestTreeMaker._ - debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)) + debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))) lazy val outerTestNeeded = ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass) @@ -452,7 +446,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder)) - override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp) + override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp)) } // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp) @@ -463,7 +457,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required) val cond = codegen._equals(patTree, prevBinder) val res = CODE.REF(prevBinder) - override def toString = "ET"+(prevBinder.name, 
patTree) + override def toString = "ET"+((prevBinder.name, patTree)) } case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders { @@ -474,7 +468,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { altss = altss map (alts => propagateSubstitution(alts, substitution)) } - def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._ + def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { atPos(pos){ // one alternative may still generate multiple trees (e.g., an extractor call + equality test) // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers @@ -587,18 +581,18 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { t match { case Function(_, _) if t.symbol == NoSymbol => t.symbol = currentOwner.newAnonymousFunctionValue(t.pos) - debug.patmat("new symbol for "+ (t, t.symbol.ownerChain)) + debug.patmat("new symbol for "+ ((t, t.symbol.ownerChain))) case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) => - debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain)) + debug.patmat("fundef: "+ ((t, t.symbol.ownerChain, currentOwner.ownerChain))) t.symbol.owner = currentOwner case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) - debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) + debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain))) if(d.symbol.moduleClass ne NoSymbol) d.symbol.moduleClass.owner = currentOwner d.symbol.owner = currentOwner // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) => - debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)) + debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))) case _ => } super.traverse(t) @@ -611,4 +605,4 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // currentRun.trackerFactory.snapshot() } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 34cdbeba8e..a0fb6e82fc 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -13,13 +13,6 @@ import scala.reflect.internal.util.Statistics trait Solving extends Logic { import PatternMatchingStats._ trait CNF extends PropositionalLogic { - - /** Override Array creation for efficiency (to not go through reflection). 
*/ - private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] { - def runtimeClass: java.lang.Class[Clause] = classOf[Clause] - final override def newArray(len: Int): Array[Clause] = new Array[Clause](len) - } - import scala.collection.mutable.ArrayBuffer type FormulaBuilder = ArrayBuffer[Clause] def formulaBuilder = ArrayBuffer[Clause]() diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 0af75a2aad..86ee7939c8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -20,7 +20,6 @@ trait ContextErrors { import global._ import definitions._ - import treeInfo._ object ErrorKinds extends Enumeration { type ErrorKind = Value diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 429bd7d682..836e0c3a38 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -594,11 +594,11 @@ trait Contexts { self: Analyzer => def restore(): Type = savedTypeBounds.foldLeft(tp) { case (current, (sym, savedInfo)) => def bounds_s(tb: TypeBounds) = if (tb.isEmptyBounds) "" else s"TypeBounds(lo=${tb.lo}, hi=${tb.hi})" //@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... => - val tb @ TypeBounds(lo, hi) = sym.info.bounds - val isUnique = lo <:< hi && hi <:< lo - val isPresent = current contains sym - def saved_s = bounds_s(savedInfo.bounds) - def current_s = bounds_s(sym.info.bounds) + val TypeBounds(lo, hi) = sym.info.bounds + val isUnique = lo <:< hi && hi <:< lo + val isPresent = current contains sym + def saved_s = bounds_s(savedInfo.bounds) + def current_s = bounds_s(sym.info.bounds) if (isUnique && isPresent) devWarningResult(s"Preserving inference: ${sym.nameString}=$hi in $current (based on $current_s) before restoring $sym to saved $saved_s")( @@ -962,15 +962,15 @@ trait Contexts { self: Analyzer => // import check from being misled by symbol lookups which are not // actually used. val other = lookupImport(imp2, requireExplicit = !sameDepth) - def imp1wins = { imports = imp1 :: imports.tail.tail } - def imp2wins = { impSym = other ; imports = imports.tail } + def imp1wins() = { imports = imp1 :: imports.tail.tail } + def imp2wins() = { impSym = other ; imports = imports.tail } if (!other.exists) // imp1 wins; drop imp2 and continue. - imp1wins + imp1wins() else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue. 
- imp2wins + imp2wins() else resolveAmbiguousImport(name, imp1, imp2) match { - case Some(imp) => if (imp eq imp1) imp1wins else imp2wins + case Some(imp) => if (imp eq imp1) imp1wins() else imp2wins() case _ => lookupError = ambiguousImports(imp1, imp2) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index ad45fc0354..b9e4b4f591 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -80,7 +80,7 @@ abstract class Duplicators extends Analyzer { BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match { case SilentResultValue(t) => sym1 = t.symbol - debuglog("fixed by trying harder: "+(sym, sym1, context)) + debuglog("fixed by trying harder: "+((sym, sym1, context))) case _ => } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 2331f82a58..4b84c21112 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -311,7 +311,7 @@ trait Implicits { (info2 == NoImplicitInfo) || (info1 != NoImplicitInfo) && { if (info1.sym.isStatic && info2.sym.isStatic) { - improvesCache get (info1, info2) match { + improvesCache get ((info1, info2)) match { case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b case None => val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 9f16f65a6a..15ed784ae0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1269,7 +1269,7 @@ trait Infer extends Checkable { // TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams Some(targs) } catch ifNoInstance { msg => - debuglog("NO INST "+ (tvars, tvars map (_.constr))) + debuglog("NO INST "+ ((tvars, tvars map (_.constr)))) NoConstructorInstanceError(tree, resTp, pt, msg) None } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b7221a78ec..a349881d6d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1124,7 +1124,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans * accessor for that field. The instance is created lazily, on first access. 
*/ private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks { - val ModuleDef(mods, name, impl) = moduleDef + val ModuleDef(_, _, impl) = moduleDef val module = moduleDef.symbol val site = module.owner val moduleName = module.name.toTermName @@ -1485,7 +1485,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans private def transformIf(tree: If): Tree = { val If(cond, thenpart, elsepart) = tree def unitIfEmpty(t: Tree): Tree = - if (t == EmptyTree) Literal(Constant()).setPos(tree.pos).setType(UnitClass.tpe) else t + if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitClass.tpe) else t cond.tpe match { case ConstantType(value) => diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 5c863469e4..5575d680ef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -283,7 +283,6 @@ abstract class TreeCheckers extends Analyzer { private def checkSymbolRefsRespectScope(tree: Tree) { def symbolOf(t: Tree): Symbol = Option(tree.symbol).getOrElse(NoSymbol) - def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol val info = Option(symbolOf(tree).info).getOrElse(NoType) val referencedSymbols: List[Symbol] = { val directRef = tree match { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 33f1ed3386..c45328a5d3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1134,7 +1134,7 @@ trait Typers extends Adaptations with Tags { if (settings.warnValueDiscard.value) context.unit.warning(tree.pos, "discarded non-Unit value") return typedPos(tree.pos, mode, pt) { - Block(List(tree), Literal(Constant())) + Block(List(tree), Literal(Constant(()))) } } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) { if (settings.warnNumericWiden.value) @@ -1248,7 +1248,7 @@ trait Typers extends Adaptations with Tags { val savedUndetparams = context.undetparams silent(_.instantiate(tree, mode, UnitClass.tpe)) orElse { _ => context.undetparams = savedUndetparams - val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant()))) + val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(())))) typed(valueDiscard, mode, UnitClass.tpe) } } @@ -1507,7 +1507,7 @@ trait Typers extends Adaptations with Tags { */ private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = { val app = treeInfo.dissectApplied(encodedtpt) - val (treeInfo.Applied(core, targs, argss), decodedtpt) = (app, app.callee) + val (treeInfo.Applied(core, targs, argss), decodedtpt) = ((app, app.callee)) val argssAreTrivial = argss == Nil || argss == ListOfNil // we cannot avoid cyclic references with `initialize` here, because when type macros arrive, @@ -2988,9 +2988,8 @@ trait Typers extends Adaptations with Tags { else if (isByNameParamType(formals.head)) NOmode else BYVALmode ) - var tree = typedArg(args.head, mode, typedMode, adapted.head) // formals may be empty, so don't call tail - tree :: loop(args.tail, formals drop 1, adapted.tail) + typedArg(args.head, mode, typedMode, adapted.head) :: loop(args.tail, formals drop 1, adapted.tail) } } loop(args0, formals0, adapted0) diff --git 
a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala index ac50324fa9..aa4ddc8ba8 100644 --- a/src/compiler/scala/tools/reflect/FastTrack.scala +++ b/src/compiler/scala/tools/reflect/FastTrack.scala @@ -27,7 +27,7 @@ trait FastTrack { final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree]) extends (MacroArgs => Any) { def validate(tree: Tree) = pf isDefinedAt Applied(tree) def apply(margs: MacroArgs) = { - val MacroArgs(c, args) = margs + val MacroArgs(c, _) = margs // Macros validated that the pf is defined here - and there's not much we could do if it weren't. c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing) } diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index e6bbe1dbed..3e227ce8aa 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -142,8 +142,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val expr1 = wrapper(transform(currentTyper, expr)) var (dummies1, unwrapped) = expr1 match { - case Block(dummies, unwrapped) => (dummies, unwrapped) - case unwrapped => (Nil, unwrapped) + case Block(dummies, unwrapped) => ((dummies, unwrapped)) + case unwrapped => ((Nil, unwrapped)) } val invertedIndex = freeTerms map (_.swap) // todo. also fixup singleton types @@ -369,7 +369,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => } private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches { - if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos) + if (compiler.settings.verbose.value) println(s"importing pt=$pt, tree=$tree, pos=$pos") val ctree: compiler.Tree = importer.importTree(tree) val cpt: compiler.Type = importer.importType(pt) val cpos: compiler.Position = importer.importPosition(pos) diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala index f84fa161c0..c5136c752b 100644 --- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala @@ -118,7 +118,7 @@ trait CompilerControl { self: Global => case ri: ReloadItem if ri.sources == sources => Some(ri) case _ => None } - superseeded.foreach(_.response.set()) + superseeded.foreach(_.response.set(())) postWorkItem(new ReloadItem(sources, response)) } diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 0170bf2032..18985fe83d 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -1038,7 +1038,7 @@ trait Trees { self: Universe => * `case` pat `if` guard => body * * If the guard is not present, the `guard` is set to `EmptyTree`. - * If the body is not specified, the `body` is set to `Literal(Constant())` + * If the body is not specified, the `body` is set to `Literal(Constant(()))` * @group Extractors */ abstract class CaseDefExtractor { @@ -1059,7 +1059,7 @@ trait Trees { self: Universe => def guard: Tree /** The body of the pattern matching clause. - * Is equal to `Literal(Constant())` if the body is not specified. + * Is equal to `Literal(Constant(()))` if the body is not specified. 
*/ def body: Tree } diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index b2269e476f..d3e486311e 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -277,7 +277,7 @@ abstract class TreeGen extends macros.TreeBuilder { /** Builds a tuple */ def mkTuple(elems: List[Tree]): Tree = - if (elems.isEmpty) Literal(Constant()) + if (elems.isEmpty) Literal(Constant(())) else Apply( Select(mkAttributedRef(TupleClass(elems.length).caseModule), nme.apply), elems) diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala index 9edd54b939..55182a4f95 100644 --- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala @@ -13,7 +13,6 @@ trait ExprTyper { import repl._ import global.{ reporter => _, Import => _, _ } - import definitions._ import syntaxAnalyzer.UnitParser import naming.freshInternalVarName diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala index 84a47311e2..c6f0cca481 100644 --- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package interpreter import scala.collection.{ mutable, immutable } -import scala.reflect.internal.Flags._ import scala.language.implicitConversions trait MemberHandlers { diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index 2064d86860..ef0a013ff2 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -679,7 +679,6 @@ trait CommentFactoryBase { this: MemberLookupBase => val SchemeUri = """([a-z]+:.*)""".r jump("[[") val parens = 2 + repeatJump('[') - val start = "[" * parens val stop = "]" * parens //println("link with " + parens + " matching parens") val target = readUntil { check(stop) || check(" ") } diff --git a/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala index c11179800c..3d80f9da52 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package doc package base -import scala.collection._ - sealed trait LinkTo final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala index 8d80333195..671518fbc6 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala @@ -46,7 +46,7 @@ trait MemberLookupBase { } else "" def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = { - var members = breakMembers(query) + val members = breakMembers(query) // (1) First look in the root package, as most of the links are qualified val fromRoot = lookupInRootPackage(pos, members) diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 1df725636a..895cc84f39 100644 --- 
a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -4,7 +4,6 @@ package scala.tools.nsc package doc package model -import base._ import base.comment._ import diagram._ diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 868c2fc3a4..71b0a00e0a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -314,7 +314,7 @@ trait ModelFactoryImplicitSupport { }) case other => // this is likely an error on the lub/glb side - error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other) + error("Scaladoc implicits: Error computing lub/glb for: " + ((tparam, constr)) + ":\n" + other) Nil } } -- cgit v1.2.3 From 089cad8f436e1bc0935218937590897f5b9cbae4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 12 Mar 2013 07:39:54 -0700 Subject: Warn about locally identifiable init order issues. This warns about a subset of initialization order issues which can easily be identified by inspection, such as val x = y val y = 5 The likelihood of this formulation being intentional is miniscule. --- .../scala/tools/nsc/transform/Constructors.scala | 35 ++++++++++++++++++++++ test/files/neg/constructor-init-order.check | 9 ++++++ test/files/neg/constructor-init-order.flags | 1 + test/files/neg/constructor-init-order.scala | 23 ++++++++++++++ 4 files changed, 68 insertions(+) create mode 100644 test/files/neg/constructor-init-order.check create mode 100644 test/files/neg/constructor-init-order.flags create mode 100644 test/files/neg/constructor-init-order.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index a4a6c3ff31..886c790ec0 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -34,6 +34,41 @@ abstract class Constructors extends Transform with ast.TreeDSL { val stats = impl.body // the transformed template body val localTyper = typer.atOwner(impl, clazz) + // Inspect for obvious out-of-order initialization; concrete, eager vals or vars, + // declared in this class, for which a reference to the member precedes its definition. + def checkableForInit(sym: Symbol) = ( + (sym ne null) + && (sym.isVal || sym.isVar) + && !(sym hasFlag LAZY | DEFERRED | SYNTHETIC) + ) + val uninitializedVals = mutable.Set[Symbol]( + stats collect { case vd: ValDef if checkableForInit(vd.symbol) => vd.symbol.accessedOrSelf }: _* + ) + if (uninitializedVals.nonEmpty) + log("Checking constructor for init order issues among: " + uninitializedVals.map(_.name).mkString(", ")) + + for (stat <- stats) { + // Checking the qualifier symbol is necessary to prevent a selection on + // another instance of the same class from potentially appearing to be a forward + // reference on the member in the current class. 
+ def check(tree: Tree) = { + for (t <- tree) t match { + case t: RefTree if uninitializedVals(t.symbol.accessedOrSelf) && t.qualifier.symbol == clazz => + unit.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}") + case _ => + } + } + stat match { + case vd: ValDef => + // doing this first allows self-referential vals, which to be a conservative + // warner we will do because it's possible though difficult for it to be useful. + uninitializedVals -= vd.symbol.accessedOrSelf + if (!vd.symbol.isLazy) + check(vd.rhs) + case _: MemberDef => // skip other member defs + case t => check(t) // constructor body statement + } + } val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE) val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED) diff --git a/test/files/neg/constructor-init-order.check b/test/files/neg/constructor-init-order.check new file mode 100644 index 0000000000..9ab6ac5923 --- /dev/null +++ b/test/files/neg/constructor-init-order.check @@ -0,0 +1,9 @@ +constructor-init-order.scala:7: warning: Reference to uninitialized value baz + val bar1 = baz // warn + ^ +constructor-init-order.scala:17: warning: Reference to uninitialized variable baz + var bar1 = baz // warn + ^ +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/constructor-init-order.flags b/test/files/neg/constructor-init-order.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/neg/constructor-init-order.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/neg/constructor-init-order.scala b/test/files/neg/constructor-init-order.scala new file mode 100644 index 0000000000..fe8fec87ad --- /dev/null +++ b/test/files/neg/constructor-init-order.scala @@ -0,0 +1,23 @@ +trait Foo0 { + val quux1: String + val quux2 = quux1 // warning here is "future work" +} + +class Foo1 extends Foo0 { + val bar1 = baz // warn + val bar2 = lazybaz // no warn + val bar3 = defbaz // no warn + val baz = "oops" + lazy val lazybaz = "ok" + def defbaz = "ok" + val quux1 = "oops" +} + +class Foo2 { + var bar1 = baz // warn + var bar2 = lazybaz // no warn + var bar3 = defbaz // no warn + var baz = "oops" + lazy val lazybaz = "ok" + def defbaz = "ok" +} -- cgit v1.2.3 From 6ef63e49f8d762ac02367225ee737ea93f52a738 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 13 Mar 2013 06:18:53 -0700 Subject: Fix it-never-happened performance regression. Diligent reviewer observed that a hot spot was possibly being made hotter. Reviewer's suggested remedy was a spectacular bust, but studious observation revealed the news flash that expensive methods are expensive and we should avoid calling them if we can. Put short-circuit test back in front of unapply call. Now the time spent in unapply is within a few percent.
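The shape of that fix can be sketched outside the compiler: test a cheap condition first so that the costly unapply only runs when its result can matter, which is what the Implicits.scala hunk below does with isView before matching against Function1. The names in this sketch (ShortCircuitSketch, ExpensiveExtractor, cheapGuard, process) are hypothetical stand-ins, not compiler code.

  // Illustrative sketch only -- hypothetical names, not the actual Implicits.scala change.
  object ShortCircuitSketch {
    // Stands in for an expensive extractor, analogous to the Function1(arg1, arg2) pattern.
    object ExpensiveExtractor {
      def unapply(x: Any): Option[Int] = { Thread.sleep(1); Some(x.hashCode) }
    }

    def process(x: Any, cheapGuard: Boolean): Int =
      if (!cheapGuard) 0                  // short-circuit: the costly unapply is never invoked
      else x match {
        case ExpensiveExtractor(n) => n   // unapply runs only after the cheap test has passed
        case _                      => 0
      }
  }
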
--- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 29d4c8423b..5b11adf127 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -573,15 +573,16 @@ trait Implicits { ) def fail(reason: String): SearchResult = failure(itree, reason) + def fallback = typed1(itree, EXPRmode, wildPt) try { - val itree1 = pt match { - case Function1(arg1, arg2) if isView => + val itree1 = if (!isView) fallback else pt match { + case Function1(arg1, arg2) => typed1( atPos(itree.pos)(Apply(itree, List(Ident("") setType approximate(arg1)))), EXPRmode, approximate(arg2) ) - case _ => typed1(itree, EXPRmode, wildPt) + case _ => fallback } if (context.hasErrors) { log("implicit adapt failed: " + context.errBuffer.head.errMsg) -- cgit v1.2.3 From 2655a99237815704e67fff4ad5533df7c1d2d752 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 13 Mar 2013 16:09:31 -0700 Subject: Removed -Ynotnull setting. It never worked and it would periodically jump out and bite someone. Slash and burn so new plants can take root. Eliminated NotNullType and NotNullClass, internal elements. Removed notNull method on Type, which no longer has a purpose. Retained 'def isNotNull' because there are types which are known by construction not to be null (ThisType, ConstantType as long as the constant isn't null, etc.) and that's still useful to know. --- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 21 ++++++++++--------- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 - .../scala/tools/nsc/typechecker/Typers.scala | 20 +++++------------- .../scala/reflect/internal/Definitions.scala | 3 +-- src/reflect/scala/reflect/internal/Importers.scala | 2 -- src/reflect/scala/reflect/internal/Types.scala | 24 ++++------------------ .../internal/settings/MutableSettings.scala | 1 - .../scala/reflect/internal/tpe/GlbLubs.scala | 8 ++------ .../scala/reflect/internal/tpe/TypeComparers.scala | 5 +---- .../scala/reflect/internal/tpe/TypeMaps.scala | 5 ----- .../scala/reflect/internal/transform/Erasure.scala | 2 +- src/reflect/scala/reflect/runtime/Settings.scala | 1 - 12 files changed, 25 insertions(+), 68 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index b9eb511a9a..d195d195af 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -19,16 +19,17 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { import global._ import definitions._ - def mkCheckInit(tree: Tree): Tree = { - val tpe = - if (tree.tpe != null || !tree.hasSymbolField) tree.tpe - else tree.symbol.tpe - - if (!global.phase.erasedTypes && settings.warnSelectNullable.value && - tpe <:< NotNullClass.tpe && !tpe.isNotNull) - mkRuntimeCall(nme.checkInitialized, List(tree)) - else - tree + def mkCheckInit(tree: Tree): Tree = { tree + // Leaving in the next commit... 
+ // val tpe = + // if (tree.tpe != null || !tree.hasSymbolField) tree.tpe + // else tree.symbol.tpe + + // if (!global.phase.erasedTypes && settings.warnSelectNullable.value && + // tpe <:< NotNullClass.tpe && !tpe.isNotNull) + // mkRuntimeCall(nme.checkInitialized, List(tree)) + // else + // tree } /** Builds a fully attributed wildcard import node. diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 9469113238..7572b0ef90 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -166,7 +166,6 @@ trait ScalaSettings extends AbsScalaSettings val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") - val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.") val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.") val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition."). withDeprecationMessage("This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ba34ae4871..bc3440380c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4187,11 +4187,11 @@ trait Typers extends Adaptations with Tags { def typedArrayValue(tree: ArrayValue) = { val elemtpt1 = typedType(tree.elemtpt, mode) - val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe)) - treeCopy.ArrayValue(tree, elemtpt1, elems1) - .setType( - (if (isFullyDefined(pt) && !phase.erasedTypes) pt - else arrayType(elemtpt1.tpe)).notNull) + val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe)) + // see run/t6126 for an example where `pt` does not suffice (tagged types) + val tpe1 = if (isFullyDefined(pt) && !phase.erasedTypes) pt else arrayType(elemtpt1.tpe) + + treeCopy.ArrayValue(tree, elemtpt1, elems1) setType tpe1 } def typedAssign(lhs: Tree, rhs: Tree): Tree = { @@ -4738,16 +4738,6 @@ trait Typers extends Adaptations with Tags { (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None) } - def isPotentialNullDeference() = { - !isPastTyper && - !sym.isConstructor && - !(qual.tpe <:< NotNullClass.tpe) && !qual.tpe.isNotNull && - !(List(Any_isInstanceOf, Any_asInstanceOf) contains result.symbol) // null.is/as is not a dereference - } - // unit is null here sometimes; how are we to know when unit might be null? (See bug #2467.) - if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null) - unit.warning(tree.pos, "potential null pointer dereference: "+tree) - result match { // could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual? 
case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index bfba81c654..55954196f6 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -281,7 +281,7 @@ trait Definitions extends api.StandardDefinitions { def Predef_AnyRef = AnyRefModule lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse { - val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, List(AnyClass.tpe, NotNullClass.tpe), ABSTRACT) + val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyClass.tpe :: Nil, ABSTRACT) val av_constr = anyval.newClassConstructor(NoPosition) anyval.info.decls enter av_constr anyval @@ -383,7 +383,6 @@ trait Definitions extends api.StandardDefinitions { lazy val StringAddClass = requiredClass[scala.runtime.StringAdd] lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc") // SI-5731 lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS) - lazy val NotNullClass = getRequiredClass("scala.NotNull") lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber] lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter] lazy val DelayedInitClass = requiredClass[scala.DelayedInit] diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 53410b29c5..f736202e13 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -248,8 +248,6 @@ trait Importers extends api.Importers { self: SymbolTable => AntiPolyType(importType(pre), targs map importType) case x: from.TypeVar => TypeVar(importType(x.origin), importTypeConstraint(x.constr), x.typeArgs map importType, x.params map importSymbol) - case from.NotNullType(tpe) => - NotNullType(importType(tpe)) case from.AnnotatedType(annots, tpe, selfsym) => AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym)) case from.ErrorType => diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index ce514e9a89..cfd41562b6 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -187,7 +187,6 @@ trait Types override def params: List[Symbol] = List() override def paramTypes: List[Type] = List() override def typeArgs = underlying.typeArgs - override def notNull = maybeRewrap(underlying.notNull) override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = underlying.instantiateTypeParams(formals, actuals) override def skolemizeExistential(owner: Symbol, origin: AnyRef) = underlying.skolemizeExistential(owner, origin) override def normalize = maybeRewrap(underlying.normalize) @@ -462,12 +461,9 @@ trait Types * the empty list for all other types */ def boundSyms: immutable.Set[Symbol] = emptySymbolSet - /** Mixin a NotNull trait unless type already has one - * ...if the option is given, since it is causing typing bugs. + /** Obsolete, here for backward compatibility. 
*/ - def notNull: Type = - if (!settings.Ynotnull.value || isNotNull || phase.erasedTypes) this - else NotNullType(this) + @deprecated("This method will be removed", "2.11.0") def notNull: Type = this /** Replace formal type parameter symbols with actual type arguments. * @@ -1212,16 +1208,6 @@ trait Types override def isNotNull = supertype.isNotNull } - case class NotNullType(override val underlying: Type) extends SubType with RewrappingTypeProxy { - def supertype = underlying - protected def rewrap(newtp: Type): Type = NotNullType(newtp) - override def isNotNull: Boolean = true - override def notNull = this - override def deconst: Type = underlying.deconst //todo: needed? - override def safeToString: String = underlying.toString + " with NotNull" - override def kind = "NotNullType" - } - /** A base class for types that represent a single value * (single-types and this-types). */ @@ -2347,8 +2333,7 @@ trait Types override def typeSymbol = sym override def typeSymbolDirect = sym - override def isNotNull = - sym.isModuleClass || sym == NothingClass || (sym isNonBottomSubClass NotNullClass) || super.isNotNull + override def isNotNull = sym == NothingClass || super.isNotNull override def parents: List[Type] = { val cache = parentsCache @@ -4086,8 +4071,7 @@ trait Types protected[internal] def containsNull(sym: Symbol): Boolean = sym.isClass && sym != NothingClass && - !(sym isNonBottomSubClass AnyValClass) && - !(sym isNonBottomSubClass NotNullClass) + !(sym isNonBottomSubClass AnyValClass) def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean = tp.typeSymbol == NothingClass || diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 506edb861e..e7a1ea9311 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -35,7 +35,6 @@ abstract class MutableSettings extends AbsSettings { def overrideObjects: BooleanSetting def printtypes: BooleanSetting def debug: BooleanSetting - def Ynotnull: BooleanSetting def explaintypes: BooleanSetting def verbose: BooleanSetting def uniqid: BooleanSetting diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index bdccc75d6d..5bdc5f8a73 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -396,7 +396,7 @@ private[internal] trait GlbLubs { indent = indent stripSuffix " " println(indent + "lub of " + ts + " is " + res)//debug } - if (ts forall typeIsNotNull) res.notNull else res + res } val GlbFailure = new Throwable @@ -536,13 +536,9 @@ private[internal] trait GlbLubs { } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) - val res = glb0(ts) - + glb0(ts) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG - - if (ts exists typeIsNotNull) res.notNull else res } /** All types in list must be polytypes with type parameter lists of diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index a03ab1610e..2248d9bbfb 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ 
b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -466,7 +466,6 @@ trait TypeComparers { def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = { val sym2 = tp2.sym sym2 match { - case NotNullClass => tp1.isNotNull case SingletonClass => tp1.isStable || fourthTry case _: ClassSymbol => if (isRawType(tp2)) @@ -502,8 +501,6 @@ trait TypeComparers { (rt2.decls forall (specializesSym(tp1, _, depth))) case et2: ExistentialType => et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry - case nn2: NotNullType => - tp1.isNotNull && isSubType(tp1, nn2.underlying, depth) case mt2: MethodType => tp1 match { case mt1 @ MethodType(params1, res1) => @@ -573,7 +570,7 @@ trait TypeComparers { } case RefinedType(parents1, _) => parents1 exists (isSubType(_, tp2, depth)) - case _: SingletonType | _: NotNullType => + case _: SingletonType => isSubType(tp1.underlying, tp2, depth) case _ => false diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 51363c0f82..d225f2f087 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -174,10 +174,6 @@ private[internal] trait TypeMaps { case tv@TypeVar(_, constr) => if (constr.instValid) this(constr.inst) else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty - case NotNullType(tp) => - val tp1 = this(tp) - if (tp1 eq tp) tp - else NotNullType(tp1) case AnnotatedType(annots, atp, selfsym) => val annots1 = mapOverAnnotations(annots) val atp1 = this(atp) @@ -1135,7 +1131,6 @@ private[internal] trait TypeMaps { case TypeBounds(_, _) => mapOver(tp) case TypeVar(_, _) => mapOver(tp) case AnnotatedType(_,_,_) => mapOver(tp) - case NotNullType(_) => mapOver(tp) case ExistentialType(_, _) => mapOver(tp) case _ => tp } diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index d83b4d71d9..b8a8e4d0c0 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -125,7 +125,7 @@ trait Erasure { if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe else if (args.head.typeSymbol.isBottomClass) ObjectArray else typeRef(apply(pre), sym, args map applyInArray) - else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) ErasedObject + else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ErasedObject else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass) else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 5d58fa96d6..6714bae1e0 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -33,7 +33,6 @@ private[reflect] class Settings extends MutableSettings { val XfullLubs = new BooleanSetting(false) val XnoPatmatAnalysis = new BooleanSetting(false) val Xprintpos = new BooleanSetting(false) - val Ynotnull = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) val Yposdebug = new BooleanSetting(false) val Yrangepos = new BooleanSetting(false) -- cgit v1.2.3 From a4c338888f4364dfd44989acd12cb72daf639081 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 13 Mar 2013 16:09:32 -0700 
Subject: Remove -Xcheck-null setting. Maybe this was useful in some way; but no way I ever saw. I have comments which tell me this is exposed in the IDE so I left a stub. I also removed mkCheckInit. That probably sounds like it's related to -Xcheckinit. Guess again, guy-who-thinks-names-mean-things. It was only used by -Xcheck-null. --- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 13 ------ .../scala/tools/nsc/settings/Warnings.scala | 12 +++--- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 46 ++++++++++------------ 5 files changed, 28 insertions(+), 47 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index d195d195af..692afbac66 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -19,19 +19,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { import global._ import definitions._ - def mkCheckInit(tree: Tree): Tree = { tree - // Leaving in the next commit... - // val tpe = - // if (tree.tpe != null || !tree.hasSymbolField) tree.tpe - // else tree.symbol.tpe - - // if (!global.phase.erasedTypes && settings.warnSelectNullable.value && - // tpe <:< NotNullClass.tpe && !tpe.isNotNull) - // mkRuntimeCall(nme.checkInitialized, List(tree)) - // else - // tree - } - /** Builds a fully attributed wildcard import node. */ def mkWildcardImport(pkg: Symbol): Import = { diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 2649a150ad..791d44153c 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -19,7 +19,6 @@ trait Warnings { // present form, but have the potential to offer useful info. protected def allWarnings = lintWarnings ++ List( warnDeadCode, - warnSelectNullable, warnValueDiscard, warnNumericWiden ) @@ -46,21 +45,20 @@ trait Warnings { allWarnings foreach (_.value = true) } ) + private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.") // Individual warnings. - val warnSelectNullable = BooleanSetting ("-Xcheck-null", "Warn upon selection of nullable reference.") val warnAdaptedArgs = BooleanSetting ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.") val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.") val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.") val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.") val warnNullaryUnit = BooleanSetting ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.") val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.") - val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", - "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.") + val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.") val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.") // Backward compatibility. 
- @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt - @deprecated("Use warnSelectNullable", "2.11.0") def Xchecknull = warnSelectNullable // used by ide - @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide + @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt + @deprecated("This option is being removed", "2.11.0") def Xchecknull = warnSelectNullable // used by ide + @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 988e80aa77..f7e3310f88 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -1060,7 +1060,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else if (needsInitFlag(sym)) mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym) else - gen.mkCheckInit(accessedRef) + accessedRef }) } else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) { diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 8c686107b4..5999a64b36 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -426,7 +426,7 @@ trait MethodSynthesis { Nil, Nil, tpt, - if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection) + if (mods.isDeferred) EmptyTree else fieldSelection ) setSymbol derivedSym } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index bc3440380c..765916bcdd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4494,35 +4494,31 @@ trait Typers extends Adaptations with Tags { reportError } } - silent(_.typed(fun, mode.forFunMode, funpt), - if (mode.inExprMode) false else context.ambiguousErrors, - if (mode.inExprMode) tree else context.tree) match { + val silentResult = silent( + op = _.typed(fun, mode.forFunMode, funpt), + reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors, + newtree = if (mode.inExprMode) tree else context.tree + ) + silentResult match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 if (Statistics.canEnable) Statistics.incCounter(typedApplyCount) - def isImplicitMethod(tpe: Type) = tpe match { - case mt: MethodType => mt.isImplicit - case _ => false - } - val useTry = ( - !isPastTyper - && fun2.isInstanceOf[Select] - && !isImplicitMethod(fun2.tpe) - && ((fun2.symbol eq null) || !fun2.symbol.isConstructor) - && (mode & (EXPRmode | SNDTRYmode)) == EXPRmode + val noSecondTry = ( + isPastTyper + || (fun2.symbol ne null) && fun2.symbol.isConstructor + || (fun2.tpe match { case mt: MethodType => mt.isImplicit case _ => false }) + ) + val isFirstTry = !noSecondTry && ( + fun2 match { + case Select(_, _) => mode inExprModeButNot SNDTRYmode + case _ => false + } ) - val res = - if (useTry) tryTypedApply(fun2, args) - else doTypedApply(tree, fun2, args, mode, pt) - - if (fun2.symbol == Array_apply && !res.isErrorTyped) { - val checked = gen.mkCheckInit(res) - // this check is needed to avoid infinite recursion in Duplicators - // (calling typed1 more than once for the same tree) - if (checked ne res) 
typed { atPos(tree.pos)(checked) } - else res - } else - res + if (isFirstTry) + tryTypedApply(fun2, args) + else + doTypedApply(tree, fun2, args, mode, pt) + case SilentTypeError(err) => onError({issue(err); setError(tree)}) } -- cgit v1.2.3 From 3a17ff00067f8f11288b1ddc778e193bed3ea017 Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 11 Mar 2013 13:26:11 -0700 Subject: Cleanup of constant optimization This commit cleans up constant optimization from the review of https://github.com/scala/scala/pull/2214 . * drops are done using the instruction's consumed count rather than a numeric literal * drops are moved into one common method in the main instruction interpreter * One instance of x.length > y.length is replaced with x.lengthCompare(y.length) > 0 * NaN is dealt with by treating it as an UNKNOWN * A test is added to make sure NaN semantics aren't broken. * The constant-optmization test is improved with tests for switch statements --- .../nsc/backend/opt/ConstantOptimization.scala | 271 ++++++++++----------- test/files/run/blame_eye_triple_eee.check | 9 + test/files/run/blame_eye_triple_eee.flags | 1 + test/files/run/blame_eye_triple_eee.scala | 61 +++++ test/files/run/constant-optimization.check | 3 + test/files/run/constant-optimization.flags | 1 + test/files/run/constant-optimization.scala | 43 ++++ 7 files changed, 245 insertions(+), 144 deletions(-) create mode 100644 test/files/run/blame_eye_triple_eee.check create mode 100644 test/files/run/blame_eye_triple_eee.flags create mode 100644 test/files/run/blame_eye_triple_eee.scala create mode 100644 test/files/run/constant-optimization.flags (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index b3da012e1a..b80acc2324 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -84,17 +84,17 @@ abstract class ConstantOptimization extends SubComponent { } /** - * True if this constant has the same representation (and therefore would compare true under eq) as another constant + * True if this constant would compare to other as true under primitive eq */ - override def equals(other: Any) = (other match { + override def equals(other: Any) = other match { case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value) case _ => false - }) + } /** - * Hash code based on representation of the constant, consistent with equals + * Hash code consistent with equals */ - override def hashCode = if (c.isIntRange) c.intValue else c.hashCode + override def hashCode = if (this.isIntAssignable) this.toInt else c.hashCode } /** @@ -296,142 +296,123 @@ abstract class ConstantOptimization extends SubComponent { /** * interpret a single instruction to find its impact on the abstract state */ - private def interpretInst(in: State, inst: Instruction): State = inst match { - case THIS(_) => - in load THIS_LOCAL - - case CONSTANT(k) => - in push SinglePossible(Const(k)) - - case LOAD_ARRAY_ITEM(_) => - in drop 2 push UNKNOWN - - case LOAD_LOCAL(local) => - // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant - in load local - - case LOAD_FIELD(_, isStatic) => - val drops = if (isStatic) 0 else 1 - in drop drops push UNKNOWN - - case LOAD_MODULE(_) => - in push NOT_NULL - - case STORE_ARRAY_ITEM(_) => - in 
drop 3 - - case STORE_LOCAL(local) => - in store local - - case STORE_THIS(_) => - // if a local is already known to have a constant and we're replacing with the same constant then we can - // replace this with a drop - in store THIS_LOCAL - - case STORE_FIELD(_, isStatic) => - val drops = if (isStatic) 1 else 2 - in drop drops - - case CALL_PRIMITIVE(_) => - in drop inst.consumed push UNKNOWN - - case CALL_METHOD(_, _) => - // TODO we could special case implementations of equals that are known, e.g. String#equals - // We could turn Possible(string constants).equals(Possible(string constants) into an eq check - // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString) - // and eliminate the null check that likely precedes this call - val initial = in drop inst.consumed - (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN } - - case BOX(_) => - val value = in peek 0 - // we simulate boxing by, um, boxing the possible/impossible contents - // so if we have Possible(1,2) originally then we'll end up with - // a Possible(Boxed(1), Boxed(2)) - // Similarly, if we know the input is not a 0 then we'll know the - // output is not a Boxed(0) - val newValue = value match { - case Possible(values) => Possible(values map Boxed) - case Impossible(values) => Impossible(values map Boxed) - } - in drop 1 push newValue - - case UNBOX(_) => - val value = in peek 0 - val newValue = value match { - // if we have a Possible, then all the possibilities - // should themselves be Boxes. In that - // case we can merge them to figure out what the UNBOX will produce - case Possible(inners) => - assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location") - val sanitized: Set[Contents] = (inners map { - case Boxed(content) => SinglePossible(content) - case _ => UNKNOWN - }) - sanitized reduce (_ merge _) - // if we have an impossible then the thing that's impossible - // should be a box. We'll unbox that to see what we get - case unknown@Impossible(inners) => - if (inners.isEmpty) { - unknown - } else { + private def interpretInst(in: State, inst: Instruction): State = { + // pop the consumed number of values off the `in` state's stack, producing a new state + def dropConsumed: State = in drop inst.consumed + + inst match { + case THIS(_) => + in load THIS_LOCAL + + case CONSTANT(k) => + // treat NaN as UNKNOWN because NaN must never equal NaN + val const = if (k.isNaN) UNKNOWN + else SinglePossible(Const(k)) + in push const + + case LOAD_ARRAY_ITEM(_) | LOAD_FIELD(_, _) | CALL_PRIMITIVE(_) => + dropConsumed push UNKNOWN + + case LOAD_LOCAL(local) => + // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant + in load local + + case STORE_LOCAL(local) => + in store local + + case STORE_THIS(_) => + // if a local is already known to have a constant and we're replacing with the same constant then we can + // replace this with a drop + in store THIS_LOCAL + + case CALL_METHOD(_, _) => + // TODO we could special case implementations of equals that are known, e.g. 
String#equals + // We could turn Possible(string constants).equals(Possible(string constants) into an eq check + // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString) + // and eliminate the null check that likely precedes this call + val initial = dropConsumed + (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN } + + case BOX(_) => + val value = in peek 0 + // we simulate boxing by, um, boxing the possible/impossible contents + // so if we have Possible(1,2) originally then we'll end up with + // a Possible(Boxed(1), Boxed(2)) + // Similarly, if we know the input is not a 0 then we'll know the + // output is not a Boxed(0) + val newValue = value match { + case Possible(values) => Possible(values map Boxed) + case Impossible(values) => Impossible(values map Boxed) + } + dropConsumed push newValue + + case UNBOX(_) => + val value = in peek 0 + val newValue = value match { + // if we have a Possible, then all the possibilities + // should themselves be Boxes. In that + // case we can merge them to figure out what the UNBOX will produce + case Possible(inners) => + assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location") val sanitized: Set[Contents] = (inners map { - case Boxed(content) => SingleImpossible(content) + case Boxed(content) => SinglePossible(content) case _ => UNKNOWN }) sanitized reduce (_ merge _) - } - } - in drop 1 push newValue - - case NEW(_) => - in push NOT_NULL - - case CREATE_ARRAY(_, dims) => - in drop dims push NOT_NULL - - case IS_INSTANCE(_) => - // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP - // and if IS_INSTANCE/C(Z)JUMP the branch for "true" can - // know that whatever was checked was not a null - // see the TODO on CJUMP for more information about propagating null - // information - // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and - // replace with a constant false, but how often is a knowable null checked for instanceof? - // TODO we could track type information and statically know to eliminate IS_INSTANCE - // but that's probably not a huge win - in drop 1 push UNKNOWN // it's actually a Possible(true, false) but since the following instruction - // will be a conditional jump comparing to true or false there - // nothing to be gained by being more precise - - case CHECK_CAST(_) => - // TODO we could track type information and statically know to eliminate CHECK_CAST - // but that's probably not a huge win - in - - case DROP(_) => - in drop 1 - - case DUP(_) => - val value = in peek 0 - in push value - - case MONITOR_ENTER() => - in drop 1 - - case MONITOR_EXIT() => - in drop 1 - - case SCOPE_ENTER(_) | SCOPE_EXIT(_) => - in - - case LOAD_EXCEPTION(_) => - in push NOT_NULL - - case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) => - dumpClassesAndAbort("Unexpected block ending instruction: " + inst) - } + // if we have an impossible then the thing that's impossible + // should be a box. 
We'll unbox that to see what we get + case unknown@Impossible(inners) => + if (inners.isEmpty) { + unknown + } else { + val sanitized: Set[Contents] = (inners map { + case Boxed(content) => SingleImpossible(content) + case _ => UNKNOWN + }) + sanitized reduce (_ merge _) + } + } + dropConsumed push newValue + + case LOAD_MODULE(_) | NEW(_) | LOAD_EXCEPTION(_) => + in push NOT_NULL + case CREATE_ARRAY(_, _) => + dropConsumed push NOT_NULL + + case IS_INSTANCE(_) => + // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP + // and if IS_INSTANCE/C(Z)JUMP the branch for "true" can + // know that whatever was checked was not a null + // see the TODO on CJUMP for more information about propagating null + // information + // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and + // replace with a constant false, but how often is a knowable null checked for instanceof? + // TODO we could track type information and statically know to eliminate IS_INSTANCE + // which might be a nice win under specialization + dropConsumed push UNKNOWN // it's actually a Possible(true, false) but since the following instruction + // will be a conditional jump comparing to true or false there + // nothing to be gained by being more precise + + case CHECK_CAST(_) => + // TODO we could track type information and statically know to eliminate CHECK_CAST + // but that's probably not a huge win + in + + case DUP(_) => + val value = in peek 0 + in push value + + case DROP(_) | MONITOR_ENTER() | MONITOR_EXIT() | STORE_ARRAY_ITEM(_) | STORE_FIELD(_, _) => + dropConsumed + + case SCOPE_ENTER(_) | SCOPE_EXIT(_) => + in + + case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) => + dumpClassesAndAbort("Unexpected block ending instruction: " + inst) + } + } /** * interpret the last instruction of a block which will be jump, a conditional branch, a throw, or a return. * It will result in a map from target blocks to the input state computed for that block. It @@ -445,7 +426,7 @@ abstract class ConstantOptimization extends SubComponent { /** * common code for interpreting CJUMP and CZJUMP */ - def interpretConditional(kind: TypeKind, in: State, toDrop: Int, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = { + def interpretConditional(kind: TypeKind, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = { // TODO use reaching analysis to update the state in the two branches // e.g. 
if the comparison was checking null equality on local x // then the in the success branch we know x is null and @@ -476,7 +457,7 @@ abstract class ConstantOptimization extends SubComponent { case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE } - val out = in drop toDrop + val out = in drop inst.consumed var result = Map[BasicBlock, State]() if (succPossible) { @@ -487,8 +468,10 @@ abstract class ConstantOptimization extends SubComponent { result += ((failure, out)) } - if (result.size == 1) (result, List.fill(toDrop)(DROP(kind)) :+ JUMP(result.keySet.head)) - else (result, inst :: Nil) + val replacements = if (result.size == 1) List.fill(inst.consumed)(DROP(kind)) :+ JUMP(result.keySet.head) + else inst :: Nil + + (result, replacements) } inst match { @@ -498,18 +481,18 @@ abstract class ConstantOptimization extends SubComponent { case CJUMP(success, failure, cond, kind) => val in1 = in peek 0 val in2 = in peek 1 - interpretConditional(kind, in, 2, in1, in2, success, failure, cond) + interpretConditional(kind, in1, in2, success, failure, cond) case CZJUMP(success, failure, cond, kind) => val in1 = in peek 0 val in2 = getZeroOf(kind) - interpretConditional(kind, in, 1, in1, in2, success, failure, cond) + interpretConditional(kind, in1, in2, success, failure, cond) case SWITCH(tags, labels) => val in1 = in peek 0 val newStuff = tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) } val (reachableTags, reachableNormalLabels) = (tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) }).unzip - val reachableLabels = if (labels.size > tags.size) { + val reachableLabels = if (labels.lengthCompare(tags.length) > 0) { // if we've got an extra label then it's the default val defaultLabel = labels.last // see if the default is reachable by seeing if the input might be out of the set @@ -528,7 +511,7 @@ abstract class ConstantOptimization extends SubComponent { // are the same we need to merge State rather than clobber // alternative, maybe we should simplify the SWITCH to not have same target labels - val newState = in drop 1 + val newState = in drop inst.consumed val result = Map(reachableLabels map { label => (label, newState) }: _*) if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil) else (result, inst :: Nil) diff --git a/test/files/run/blame_eye_triple_eee.check b/test/files/run/blame_eye_triple_eee.check new file mode 100644 index 0000000000..5e46d91a8f --- /dev/null +++ b/test/files/run/blame_eye_triple_eee.check @@ -0,0 +1,9 @@ +if (NaN == NaN) is good +if (x == x) is good +if (x == NaN) is good +if (NaN != NaN) is good +if (x != x) is good +if (NaN != x) is good +x matching was good +NaN matching was good +loop with NaN was goood diff --git a/test/files/run/blame_eye_triple_eee.flags b/test/files/run/blame_eye_triple_eee.flags new file mode 100644 index 0000000000..c9b68d70dc --- /dev/null +++ b/test/files/run/blame_eye_triple_eee.flags @@ -0,0 +1 @@ +-optimise diff --git a/test/files/run/blame_eye_triple_eee.scala b/test/files/run/blame_eye_triple_eee.scala new file mode 100644 index 0000000000..1640aead40 --- /dev/null +++ b/test/files/run/blame_eye_triple_eee.scala @@ -0,0 +1,61 @@ +object Test extends App { + import Double.NaN + + // NaN must not equal NaN no matter what optimizations are applied + // All the following will seem redundant, but to an optimizer + // they can appear different + + val x = NaN + + if (NaN == NaN) + println("if (NaN == NaN) is broken") + else + 
println("if (NaN == NaN) is good") + + if (x == x) + println("if (x == x) is broken") + else + println("if (x == x) is good") + + if (x == NaN) + println("if (x == NaN) is broken") + else + println("if (x == NaN) is good") + + if (NaN != NaN) + println("if (NaN != NaN) is good") + else + println("if (NaN != NaN) broken") + + if (x != x) + println("if (x != x) is good") + else + println("if (x != x) broken") + + if (NaN != x) + println("if (NaN != x) is good") + else + println("if (NaN != x) is broken") + + x match { + case 0.0d => println("x matched 0!") + case NaN => println("x matched NaN!") + case _ => println("x matching was good") + } + + NaN match { + case 0.0d => println("NaN matched 0!") + case NaN => println("NaN matched NaN!") + case _ => println("NaN matching was good") + } + + var z = 0.0d + var i = 0 + while (i < 10) { + if (i % 2 == 0) z = NaN + else z = NaN + i += 1 + } + if (z.isNaN && i == 10) println("loop with NaN was goood") + else println("loop with NaN was broken") +} diff --git a/test/files/run/constant-optimization.check b/test/files/run/constant-optimization.check index 090e53ac40..957ffc5a87 100644 --- a/test/files/run/constant-optimization.check +++ b/test/files/run/constant-optimization.check @@ -1,2 +1,5 @@ testBothReachable: good testOneReachable: good +testAllReachable: good +testOneUnreachable: good +testDefaultUnreachable: good diff --git a/test/files/run/constant-optimization.flags b/test/files/run/constant-optimization.flags new file mode 100644 index 0000000000..c9b68d70dc --- /dev/null +++ b/test/files/run/constant-optimization.flags @@ -0,0 +1 @@ +-optimise diff --git a/test/files/run/constant-optimization.scala b/test/files/run/constant-optimization.scala index 86f981e13f..5d13272f3b 100644 --- a/test/files/run/constant-optimization.scala +++ b/test/files/run/constant-optimization.scala @@ -13,6 +13,49 @@ object Test extends App { println(s"testOneReachable: $y") } + def testAllReachable() { + val i = util.Random.nextInt + val y = (i % 2) match { + case 0 => "good" + case 1 => "good" + case _ => "good" + } + println(s"testAllReachable: $y") + } + + def testOneUnreachable() { + val i = util.Random.nextInt + val x = if (i % 2 == 0) { + 1 + } else { + 2 + } + val y = x match { + case 0 => "good" + case 1 => "good" + case _ => "good" + } + println(s"testOneUnreachable: $y") + } + + def testDefaultUnreachable() { + val i = util.Random.nextInt + val x = if (i % 2 == 0) { + 1 + } else { + 2 + } + val y = x match { + case 1 => "good" + case 2 => "good" + case _ => "good" + } + println(s"testDefaultUnreachable: $y") + } + testBothReachable() testOneReachable() + testAllReachable() + testOneUnreachable() + testDefaultUnreachable() } -- cgit v1.2.3 From ec6548fb91309b72bd46ff939f79bb253ca5953a Mon Sep 17 00:00:00 2001 From: "Paolo G. Giarrusso" Date: Sun, 20 Jan 2013 17:55:57 +0100 Subject: SI-6123: -explaintypes should not explain errors which won't be reported -explainTypes means that only type tests which *fail* should be reported in more detail by using explainTypes. Hence, callers of explainTypes should check if type errors are being ignored, by checking context.reportErrors. Hence, this check is added to Inferencer, and another call site is redirected to that method. Moreover, explainTypes should only be called if an error exists. Enforce that in checkSubType, and remove spurious home-made explainTypes output. Finally, in ContextErrors, stop checking `settings.explaintypes.value` before calling `explainTypes` which will check it again. 
Note that this patch does not fix all occurrences, but only the ones which showed up during debugging. The other ones never cause problems, maybe because they occur when contextErrors is in fact guaranteed to be true. We might want to fix those ones anyway. This fixes regressions in c800d1fec5241ed8c29e5af30465856f9b583246 and 78f9ef3906c78413ff8835fdad3849bfe5516be2. Thanks to hubertp (Hubert Plociniczak) for the first round of review. Refs #6123 backport to _2.10.x_ --- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 8 ++++---- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 6 ++++-- 2 files changed, 8 insertions(+), 6 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 0af75a2aad..e1b16c5c24 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -173,8 +173,7 @@ trait ContextErrors { assert(!foundType.isErroneous && !req.isErroneous, (foundType, req)) issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(foundType, req, infer.isPossiblyMissingArgs(foundType, req))) ) - if (settings.explaintypes.value) - explainTypes(foundType, req) + infer.explainTypes(foundType, req) } def WithFilterError(tree: Tree, ex: AbsTypeError) = { @@ -1274,11 +1273,12 @@ trait ContextErrors { // not exactly an error generator, but very related // and I dearly wanted to push it away from Macros.scala private def checkSubType(slot: String, rtpe: Type, atpe: Type) = { - val ok = if (macroDebugVerbose || settings.explaintypes.value) { - if (rtpe eq atpe) println(rtpe + " <: " + atpe + "?" + EOL + "true") + val ok = if (macroDebugVerbose) { withTypesExplained(rtpe <:< atpe) } else rtpe <:< atpe if (!ok) { + if (!macroDebugVerbose) + explainTypes(rtpe, atpe) compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString))) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 9f16f65a6a..5113edb01d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -313,8 +313,10 @@ trait Infer extends Checkable { */ ) - def explainTypes(tp1: Type, tp2: Type) = - withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2)) + def explainTypes(tp1: Type, tp2: Type) = { + if (context.reportErrors) + withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2)) + } /* -- Tests & Checks---------------------------------------------------- */ -- cgit v1.2.3 From 2ba065f0ae434944566ca8fe76232af32ab8e21a Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Wed, 20 Mar 2013 12:37:52 +0400 Subject: Doc -> C-style comments for local symbols to avoid "discarding unmoored doc comment" warning when building distribution for scala itself. 
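The warning in question fires when a /** ... */ doc comment is attached to a local symbol that has no documentable member to hang on; rewriting it as a plain /* ... */ comment keeps the text and avoids the warning, which is all the hunks below do. A minimal, hypothetical illustration (the Example object is made up for this sketch, it is not part of the patch):

  object Example {
    def compute(): Int = {
      /** A doc comment on a local definition has no member to document,
       *  so a doc-comment-aware compile may report it as an unmoored doc comment. */
      val a = 1

      /* A C-style comment carries the same note and is never parsed as documentation. */
      val b = 2

      a + b
    }
  }
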
--- .../scala/reflect/reify/codegen/GenSymbols.scala | 2 +- .../scala/tools/ant/sabbus/ScalacFork.scala | 2 +- src/compiler/scala/tools/cmd/CommandLine.scala | 4 +- src/compiler/scala/tools/nsc/Global.scala | 4 +- src/compiler/scala/tools/nsc/ScriptRunner.scala | 12 +-- src/compiler/scala/tools/nsc/ast/DocComments.scala | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 36 ++++---- .../scala/tools/nsc/ast/parser/Scanners.scala | 24 +++--- .../tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 2 +- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 17 ++-- .../tools/nsc/backend/icode/BasicBlocks.scala | 12 +-- .../scala/tools/nsc/backend/icode/GenICode.scala | 26 +++--- .../tools/nsc/backend/icode/ICodeCheckers.scala | 26 +++--- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 24 +++--- .../scala/tools/nsc/backend/jvm/GenASM.scala | 48 +++++------ .../nsc/backend/opt/ConstantOptimization.scala | 4 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 16 ++-- src/compiler/scala/tools/nsc/plugins/Plugins.scala | 6 +- .../nsc/symtab/classfile/ClassfileParser.scala | 16 ++-- .../tools/nsc/symtab/classfile/ICodeReader.scala | 4 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 4 +- .../scala/tools/nsc/transform/CleanUp.scala | 12 +-- .../scala/tools/nsc/transform/Constructors.scala | 47 +++++------ .../scala/tools/nsc/transform/Erasure.scala | 8 +- .../scala/tools/nsc/transform/InlineErasure.scala | 4 +- .../scala/tools/nsc/transform/LambdaLift.scala | 5 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 94 ++++++++++----------- .../scala/tools/nsc/transform/PostErasure.scala | 10 +-- .../tools/nsc/transform/SpecializeTypes.scala | 66 +++++++-------- .../scala/tools/nsc/transform/TailCalls.scala | 20 ++--- .../scala/tools/nsc/transform/UnCurry.scala | 3 +- .../scala/tools/nsc/transform/patmat/Logic.scala | 6 +- .../nsc/transform/patmat/MatchTranslation.scala | 36 ++++---- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 2 +- .../scala/tools/nsc/typechecker/EtaExpansion.scala | 7 +- .../scala/tools/nsc/typechecker/Implicits.scala | 28 +++---- .../scala/tools/nsc/typechecker/Infer.scala | 28 +++---- .../scala/tools/nsc/typechecker/Namers.scala | 22 ++--- .../tools/nsc/typechecker/NamesDefaults.scala | 4 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 51 ++++++------ .../tools/nsc/typechecker/SuperAccessors.scala | 3 +- .../tools/nsc/typechecker/SyntheticMethods.scala | 97 +++++++++++----------- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 82 +++++++++--------- src/compiler/scala/tools/nsc/util/ClassPath.scala | 2 +- .../scala/tools/nsc/util/ShowPickled.scala | 8 +- .../tools/selectivecps/SelectiveANFTransform.scala | 6 +- src/library/scala/collection/Iterator.scala | 2 +- src/library/scala/collection/SeqLike.scala | 4 +- .../scala/collection/immutable/NumericRange.scala | 8 +- src/library/scala/concurrent/SyncVar.scala | 6 +- src/library/scala/xml/parsing/MarkupParser.scala | 4 +- .../scala/tools/partest/nest/ConsoleRunner.scala | 2 +- .../reflect/internal/ClassfileConstants.scala | 2 +- src/reflect/scala/reflect/internal/Mirrors.scala | 4 +- src/reflect/scala/reflect/internal/Types.scala | 12 +-- .../reflect/internal/pickling/UnPickler.scala | 6 +- .../scala/reflect/internal/tpe/TypeComparers.scala | 36 ++++---- .../scala/reflect/internal/tpe/TypeMaps.scala | 2 +- .../scala/reflect/runtime/JavaMirrors.scala | 2 
+- 62 files changed, 512 insertions(+), 528 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala index 67bc93d407..90fb41f80b 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala @@ -42,7 +42,7 @@ trait GenSymbols { else if (sym.isPackage) mirrorMirrorCall(nme.staticPackage, reify(sym.fullName)) else if (sym.isLocatable) { - /** This is a fancy conundrum that stems from the fact that Scala allows + /* This is a fancy conundrum that stems from the fact that Scala allows * packageless packages and packageless objects with the same names in the same program. * * For more details read the docs to staticModule and staticPackage. diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala index 76820b8060..363c31f6c4 100644 --- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala +++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala @@ -114,7 +114,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs { mapper ) map (x => new File(sourceDir, x)) - /** Nothing to do. */ + /* Nothing to do. */ if (includedFiles.isEmpty && argfile.isEmpty) return diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala index cf0463423c..e8ac882ee6 100644 --- a/src/compiler/scala/tools/cmd/CommandLine.scala +++ b/src/compiler/scala/tools/cmd/CommandLine.scala @@ -36,7 +36,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C def loop(args: List[String]): Map[String, String] = { def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() } - /** Returns Some(List(args)) if this option expands to an + /* Returns Some(List(args)) if this option expands to an * argument list and it's not returning only the same arg. */ def expand(s1: String) = { @@ -48,7 +48,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C else None } - /** Assumes known options have all been ruled out already. */ + /* Assumes known options have all been ruled out already. */ def isUnknown(opt: String) = onlyKnownOptions && (opt startsWith "-") && { errorFn("Option '%s' not recognized.".format(opt)) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 67c2b99475..85b1bc3b1c 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1114,7 +1114,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) override def currentRunId = curRunId def echoPhaseSummary(ph: Phase) = { - /** Only output a summary message under debug if we aren't echoing each file. */ + /* Only output a summary message under debug if we aren't echoing each file. */ if (settings.debug.value && !(settings.verbose.value || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } @@ -1208,7 +1208,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) curRunId += 1 curRun = this - /** Set phase to a newly created syntaxAnalyzer and call definitions.init. */ + /* Set phase to a newly created syntaxAnalyzer and call definitions.init. 
*/ val parserPhase: Phase = syntaxAnalyzer.newPhase(NoPhase) phase = parserPhase definitions.init() diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 92b2dc79ed..821e88e52e 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -84,8 +84,8 @@ class ScriptRunner extends HasCompileSocket { { def mainClass = scriptMain(settings) - /** Compiles the script file, and returns the directory with the compiled - * class files, if the compilation succeeded. + /* Compiles the script file, and returns the directory with the compiled + * class files, if the compilation succeeded. */ def compile: Option[Directory] = { val compiledPath = Directory makeTemp "scalascript" @@ -96,8 +96,8 @@ class ScriptRunner extends HasCompileSocket { settings.outdir.value = compiledPath.path if (settings.nc.value) { - /** Setting settings.script.value informs the compiler this is not a - * self contained compilation unit. + /* Setting settings.script.value informs the compiler this is not a + * self contained compilation unit. */ settings.script.value = mainClass val reporter = new ConsoleReporter(settings) @@ -110,8 +110,8 @@ class ScriptRunner extends HasCompileSocket { else None } - /** The script runner calls sys.exit to communicate a return value, but this must - * not take place until there are no non-daemon threads running. Tickets #1955, #2006. + /* The script runner calls sys.exit to communicate a return value, but this must + * not take place until there are no non-daemon threads running. Tickets #1955, #2006. */ util.waitingForThreads { if (settings.save.value) { diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 3397797927..5ad494177c 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -501,7 +501,7 @@ trait DocComments { self: Global => result } - /** + /* * work around the backticks issue suggested by Simon in * https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74 * ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 9218ad3330..0396a871de 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -321,38 +321,38 @@ self => accept(EOF) def mainModuleName = newTermName(settings.script.value) - /** If there is only a single object template in the file and it has a - * suitable main method, we will use it rather than building another object - * around it. Since objects are loaded lazily the whole script would have - * been a no-op, so we're not taking much liberty. + /* If there is only a single object template in the file and it has a + * suitable main method, we will use it rather than building another object + * around it. Since objects are loaded lazily the whole script would have + * been a no-op, so we're not taking much liberty. */ def searchForMain(): Option[Tree] = { - /** Have to be fairly liberal about what constitutes a main method since - * nothing has been typed yet - for instance we can't assume the parameter - * type will look exactly like "Array[String]" as it could have been renamed - * via import, etc. 
+ /* Have to be fairly liberal about what constitutes a main method since + * nothing has been typed yet - for instance we can't assume the parameter + * type will look exactly like "Array[String]" as it could have been renamed + * via import, etc. */ def isMainMethod(t: Tree) = t match { case DefDef(_, nme.main, Nil, List(_), _, _) => true case _ => false } - /** For now we require there only be one top level object. */ + /* For now we require there only be one top level object. */ var seenModule = false val newStmts = stmts collect { case t @ Import(_, _) => t case md @ ModuleDef(mods, name, template) if !seenModule && (md exists isMainMethod) => seenModule = true - /** This slightly hacky situation arises because we have no way to communicate - * back to the scriptrunner what the name of the program is. Even if we were - * willing to take the sketchy route of settings.script.value = progName, that - * does not work when using fsc. And to find out in advance would impose a - * whole additional parse. So instead, if the actual object's name differs from - * what the script is expecting, we transform it to match. + /* This slightly hacky situation arises because we have no way to communicate + * back to the scriptrunner what the name of the program is. Even if we were + * willing to take the sketchy route of settings.script.value = progName, that + * does not work when using fsc. And to find out in advance would impose a + * whole additional parse. So instead, if the actual object's name differs from + * what the script is expecting, we transform it to match. */ if (name == mainModuleName) md else treeCopy.ModuleDef(md, mods, mainModuleName, template) case _ => - /** If we see anything but the above, fail. */ + /* If we see anything but the above, fail. */ return None } Some(makePackaging(0, emptyPkg, newStmts)) @@ -2265,8 +2265,8 @@ self => accept(DOT) result } - /** Walks down import `foo.bar.baz.{ ... }` until it ends at a - * an underscore, a left brace, or an undotted identifier. + /* Walks down import `foo.bar.baz.{ ... }` until it ends at a + * an underscore, a left brace, or an undotted identifier. */ def loop(expr: Tree): Tree = { expr setPos expr.pos.makeTransparent diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 6ad1c50075..b485e862fd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -303,11 +303,11 @@ trait Scanners extends ScannersCommon { next.token = EMPTY } - /** Insert NEWLINE or NEWLINES if - * - we are after a newline - * - we are within a { ... } or on toplevel (wrt sepRegions) - * - the current token can start a statement and the one before can end it - * insert NEWLINES if we are past a blank line, NEWLINE otherwise + /* Insert NEWLINE or NEWLINES if + * - we are after a newline + * - we are within a { ... } or on toplevel (wrt sepRegions) + * - the current token can start a statement and the one before can end it + * insert NEWLINES if we are past a blank line, NEWLINE otherwise */ if (!applyBracePatch() && afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) && (sepRegions.isEmpty || sepRegions.head == RBRACE)) { @@ -440,7 +440,7 @@ trait Scanners extends ScannersCommon { nextChar() base = 16 } else { - /** + /* * What should leading 0 be in the future? It is potentially dangerous * to let it be base-10 because of history. Should it be an error? 
Is * there a realistic situation where one would need it? @@ -959,7 +959,7 @@ trait Scanners extends ScannersCommon { } token = INTLIT - /** When we know for certain it's a number after using a touch of lookahead */ + /* When we know for certain it's a number after using a touch of lookahead */ def restOfNumber() = { putChar(ch) nextChar() @@ -987,8 +987,8 @@ trait Scanners extends ScannersCommon { val lookahead = lookaheadReader val c = lookahead.getc() - /** As of scala 2.11, it isn't a number unless c here is a digit, so - * settings.future.value excludes the rest of the logic. + /* As of scala 2.11, it isn't a number unless c here is a digit, so + * settings.future.value excludes the rest of the logic. */ if (settings.future.value && !isDigit(c)) return setStrVal() @@ -998,16 +998,16 @@ trait Scanners extends ScannersCommon { case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => true - /** Backquoted idents like 22.`foo`. */ + /* Backquoted idents like 22.`foo`. */ case '`' => return setStrVal() /** Note the early return */ - /** These letters may be part of a literal, or a method invocation on an Int. + /* These letters may be part of a literal, or a method invocation on an Int. */ case 'd' | 'D' | 'f' | 'F' => !isIdentifierPart(lookahead.getc()) - /** A little more special handling for e.g. 5e7 */ + /* A little more special handling for e.g. 5e7 */ case 'e' | 'E' => val ch = lookahead.getc() !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-') diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index cdcfd0b834..f326212d5b 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -196,7 +196,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { uri1 } - /** Extract all the namespaces from the attribute map. */ + /* Extract all the namespaces from the attribute map. */ val namespaces: List[Tree] = for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield { val ns = splitPrefix(z) match { diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index d70b1f4d9c..897c6dfdb0 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -353,9 +353,9 @@ abstract class TreeBuilder { */ private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = { - /** make a closure pat => body. - * The closure is assigned a transparent position with the point at pos.point and - * the limits given by pat and body. + /* make a closure pat => body. + * The closure is assigned a transparent position with the point at pos.point and + * the limits given by pat and body. */ def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = { def splitpos = wrappingPos(List(pat, body)).withPoint(pos.point).makeTransparent @@ -371,26 +371,23 @@ abstract class TreeBuilder { } } - /** Make an application qual.meth(pat => body) positioned at `pos`. + /* Make an application qual.meth(pat => body) positioned at `pos`. 
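
The makeClosure comment above and makeCombination (defined just below) implement the usual for-comprehension desugaring: each generator becomes a closure that is passed to map, flatMap, foreach or withFilter on the preceding expression. A minimal hand-written illustration of that rewriting (plain Scala, not compiler output; the names are only for the example):

object ForDesugarExample {
  val xs = List(1, 2, 3)
  val ys = List(-1, 4)

  // What makeFor conceptually builds: a closure per generator (makeClosure),
  // applied through flatMap/withFilter/map (makeCombination).
  val desugared: List[Int] =
    xs.flatMap(x => ys.withFilter(y => y > 0).map(y => x + y))

  val original: List[Int] =
    for (x <- xs; y <- ys if y > 0) yield x + y

  def main(args: Array[String]): Unit =
    assert(desugared == original)
}
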
*/ def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree = Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos - /** If `pat` is not yet a `Bind` wrap it in one with a fresh name - */ + /* If `pat` is not yet a `Bind` wrap it in one with a fresh name */ def makeBind(pat: Tree): Tree = pat match { case Bind(_, _) => pat case _ => Bind(freshName(), pat) setPos pat.pos } - /** A reference to the name bound in Bind `pat`. - */ + /* A reference to the name bound in Bind `pat`. */ def makeValue(pat: Tree): Tree = pat match { case Bind(name, _) => Ident(name) setPos pat.pos.focus } - /** The position of the closure that starts with generator at position `genpos`. - */ + /* The position of the closure that starts with generator at position `genpos`. */ def closurePos(genpos: Position) = { val end = body.pos match { case NoPosition => genpos.point diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index d772dcb6c4..89682e91d2 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -68,10 +68,10 @@ trait BasicBlocks { addBlock(scratchBlocks.head) scratchBlocks = scratchBlocks.tail } - /** Return a list of successors for 'b' that come from exception handlers - * covering b's (non-exceptional) successors. These exception handlers - * might not cover 'b' itself. This situation corresponds to an - * exception being thrown as the first thing of one of b's successors. + /* Return a list of successors for 'b' that come from exception handlers + * covering b's (non-exceptional) successors. These exception handlers + * might not cover 'b' itself. This situation corresponds to an + * exception being thrown as the first thing of one of b's successors. */ while (scratchHandlers ne Nil) { val handler = scratchHandlers.head @@ -332,8 +332,8 @@ trait BasicBlocks { if (ignore) { if (settings.debug.value) { - /** Trying to pin down what it's likely to see after a block has been - * put into ignore mode so we hear about it if there's a problem. + /* Trying to pin down what it's likely to see after a block has been + * put into ignore mode so we hear about it if there's a problem. */ instr match { case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 94116d6783..1da1480de5 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -777,8 +777,8 @@ abstract class GenICode extends SubComponent { ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1) val cm = CALL_METHOD(sym, invokeStyle) - /** In a couple cases, squirrel away a little extra information in the - * CALL_METHOD for use by GenASM. + /* In a couple cases, squirrel away a little extra information in the + * CALL_METHOD for use by GenASM. */ fun match { case Select(qual, _) => @@ -1470,11 +1470,11 @@ abstract class GenICode extends SubComponent { ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR.toString) } - /** True if the equality comparison is between values that require the use of the rich equality - * comparator (scala.runtime.Comparator.equals). This is the case when either side of the - * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character. 
- * When it is statically known that both sides are equal and subtypes of Number of Character, - * not using the rich equality is possible (their own equals method will do ok.)*/ + /* True if the equality comparison is between values that require the use of the rich equality + * comparator (scala.runtime.Comparator.equals). This is the case when either side of the + * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character. + * When it is statically known that both sides are equal and subtypes of Number of Character, + * not using the rich equality is possible (their own equals method will do ok.)*/ def mustUseAnyComparator: Boolean = { def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe) !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol) @@ -1568,12 +1568,12 @@ abstract class GenICode extends SubComponent { debugassert(ctx.clazz.symbol eq cls, "Classes are not the same: " + ctx.clazz.symbol + ", " + cls) - /** Non-method term members are fields, except for module members. Module - * members can only happen on .NET (no flatten) for inner traits. There, - * a module symbol is generated (transformInfo in mixin) which is used - * as owner for the members of the implementation class (so that the - * backend emits them as static). - * No code is needed for this module symbol. + /* Non-method term members are fields, except for module members. Module + * members can only happen on .NET (no flatten) for inner traits. There, + * a module symbol is generated (transformInfo in mixin) which is used + * as owner for the members of the implementation class (so that the + * backend emits them as static). + * No code is needed for this module symbol. */ for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule) ctx.clazz addField new IField(f) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 82fdcbbc04..b7b07a579f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -169,10 +169,10 @@ abstract class ICodeCheckers { def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference) - /** XXX workaround #1: one stack empty, the other has BoxedUnit. - * One example where this arises is: + /* XXX workaround #1: one stack empty, the other has BoxedUnit. + * One example where this arises is: * - * def f(b: Boolean): Unit = synchronized { if (b) () } + * def f(b: Boolean): Unit = synchronized { if (b) () } */ def allUnits(s: TypeStack) = s.types forall (_ == BoxedUnitReference) @@ -181,10 +181,10 @@ abstract class ICodeCheckers { case (x1, x2) if f(x2) => x1 } - /** XXX workaround #2: different stacks heading into an exception - * handler which will clear them anyway. Examples where it arises: + /* XXX workaround #2: different stacks heading into an exception + * handler which will clear them anyway. Examples where it arises: * - * var bippy: Int = synchronized { if (b) 5 else 10 } + * var bippy: Int = synchronized { if (b) 5 else 10 } */ def isHandlerBlock() = bl.exceptionHandlerStart @@ -336,7 +336,7 @@ abstract class ICodeCheckers { def popStack2 = { checkStack(2) ; (popStackN(2): @unchecked) match { case List(x, y) => (x, y) } } def popStack3 = { checkStack(3) ; (popStackN(3): @unchecked) match { case List(x, y, z) => (x, y, z) } } - /** Called by faux instruction LOAD_EXCEPTION to wipe out the stack. 
*/ + /* Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */ def clearStack() = { if (stack.nonEmpty) logChecker("Wiping out the " + stack.length + " element stack for exception handler: " + stack) @@ -385,7 +385,7 @@ abstract class ICodeCheckers { icodeError(" expected reference type, but " + obj + " found") } - /** Checks that tpe is a subtype of one of the allowed types */ + /* Checks that tpe is a subtype of one of the allowed types */ def checkType(tpe: TypeKind, allowed: TypeKind*) = ( if (allowed exists (k => isSubtype(tpe, k))) () else icodeError(tpe + " is not one of: " + allowed.mkString("{ ", ", ", " }")) @@ -393,16 +393,14 @@ abstract class ICodeCheckers { def checkNumeric(tpe: TypeKind) = checkType(tpe, BYTE, CHAR, SHORT, INT, LONG, FLOAT, DOUBLE) - /** Checks that the 2 topmost elements on stack are of the - * kind TypeKind. - */ + /* Checks that the 2 topmost elements on stack are of the kind TypeKind. */ def checkBinop(kind: TypeKind) { val (a, b) = popStack2 checkType(a, kind) checkType(b, kind) } - /** Check that arguments on the stack match method params. */ + /* Check that arguments on the stack match method params. */ def checkMethodArgs(method: Symbol) { val params = method.info.paramTypes checkStack(params.length) @@ -412,8 +410,8 @@ abstract class ICodeCheckers { ) } - /** Checks that the object passed as receiver has a method - * `method` and that it is callable from the current method. + /* Checks that the object passed as receiver has a method + * `method` and that it is callable from the current method. */ def checkMethod(receiver: TypeKind, method: Symbol) = receiver match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 1875c8c914..2c8fda85f4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -130,21 +130,21 @@ trait TypeKinds { self: ICodes => * The lub is based on the lub of scala types. */ def lub(a: TypeKind, b: TypeKind): TypeKind = { - /** The compiler's lub calculation does not order classes before traits. - * This is apparently not wrong but it is inconvenient, and causes the - * icode checker to choke when things don't match up. My attempts to - * alter the calculation at the compiler level were failures, so in the - * interests of a working icode checker I'm making the adjustment here. + /* The compiler's lub calculation does not order classes before traits. + * This is apparently not wrong but it is inconvenient, and causes the + * icode checker to choke when things don't match up. My attempts to + * alter the calculation at the compiler level were failures, so in the + * interests of a working icode checker I'm making the adjustment here. * - * Example where we'd like a different answer: + * Example where we'd like a different answer: * - * abstract class Tom - * case object Bob extends Tom - * case object Harry extends Tom - * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product" + * abstract class Tom + * case object Bob extends Tom + * case object Harry extends Tom + * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product" * - * Here we make the adjustment by rewinding to a pre-erasure state and - * sifting through the parents for a class type. + * Here we make the adjustment by rewinding to a pre-erasure state and + * sifting through the parents for a class type. 
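
The TypeKinds.lub comment above only needs a proper class parent (as opposed to a trait) moved to the front. A stand-alone sketch of that "sift the parents for a class type" step, with a toy Parent type standing in for compiler symbols (illustrative only):

object LubOrderingSketch {
  // Toy stand-in for compiler symbols: we only care whether a parent is a trait.
  final case class Parent(name: String, isTrait: Boolean)

  // Move the first proper class (if any) to the front, as the comment above asks;
  // traits keep their relative order.
  def classFirst(parents: List[Parent]): List[Parent] =
    parents.find(p => !p.isTrait) match {
      case Some(cls) => cls :: parents.filterNot(_ == cls)
      case None      => parents
    }

  def main(args: Array[String]): Unit = {
    val computed = List(Parent("Product", isTrait = true), Parent("Tom", isTrait = false))
    // "Product with Tom" becomes "Tom with Product", matching the Bob/Harry example.
    println(classFirst(computed).map(_.name).mkString(" with "))
  }
}
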
*/ def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry { val tp = global.lub(List(tk1.toType, tk2.toType)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 4a3d1805d9..99928d965b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -513,7 +513,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { */ def javaName(sym: Symbol): String = { - /** + /* * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer * * Note: This method is called recursively thus making sure that we add complete chain @@ -608,9 +608,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) } def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) { - /** The outer name for this inner class. Note that it returns null - * when the inner class should not get an index in the constant pool. - * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS. + /* The outer name for this inner class. Note that it returns null + * when the inner class should not get an index in the constant pool. + * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS. */ def outerName(innerSym: Symbol): String = { if (innerSym.originalEnclosingMethod != NoSymbol) @@ -1044,9 +1044,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType // val paramNames = 0 until paramJavaTypes.length map ("x_" + _) - /** Forwarders must not be marked final, - * as the JVM will not allow redefinition of a final static method, - * and we don't know what classes might be subclassing the companion class. See SI-4827. + /* Forwarders must not be marked final, + * as the JVM will not allow redefinition of a final static method, + * and we don't know what classes might be subclassing the companion class. See SI-4827. */ // TODO: evaluate the other flags we might be dropping on the floor here. // TODO: ACC_SYNTHETIC ? @@ -1270,8 +1270,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case _ => None } - /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents. - * This is important on Android because there is otherwise an interface explosion. + /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents. + * This is important on Android because there is otherwise an interface explosion. */ def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = { var rest = lstIfaces @@ -1847,7 +1847,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val keyMax = keys(keys.length - 1) val isDenseEnough: Boolean = { - /** Calculate in long to guard against overflow. TODO what overflow??? */ + /* Calculate in long to guard against overflow. TODO what overflow??? 
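
The keyRangeD/klenD/kdensity computation immediately below decides whether the match keys are packed tightly enough for a TABLESWITCH or should fall back to a LOOKUPSWITCH. A small stand-alone version of the same arithmetic (the threshold here is illustrative; the backend uses its own cut-off):

object SwitchDensitySketch {
  // Mirrors keyRangeD/klenD/kdensity below: widen to Long first so
  // (keyMax - keyMin + 1) cannot overflow Int.
  def density(sortedKeys: Array[Int]): Double = {
    val keyMin = sortedKeys.head
    val keyMax = sortedKeys.last
    val keyRange = (keyMax.toLong - keyMin + 1).toDouble
    sortedKeys.length / keyRange
  }

  // Illustrative threshold only.
  def useTableSwitch(sortedKeys: Array[Int], minDensity: Double = 0.5): Boolean =
    density(sortedKeys) >= minDensity

  def main(args: Array[String]): Unit = {
    println(useTableSwitch(Array(1, 2, 3, 4)))       // dense keys: true
    println(useTableSwitch(Array(1, 1000, 1000000))) // sparse keys: false
  }
}
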
*/ val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double] val klenD: Double = keys.length val kdensity: Double = (klenD / keyRangeD) @@ -1982,7 +1982,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock()) // ------------------------------------------------------------------------------------------------------------ - /**Generate exception handlers for the current method. + /* Generate exception handlers for the current method. * * Quoting from the JVMS 4.7.3 The Code Attribute * The items of the Code_attribute structure are as follows: @@ -2005,16 +2005,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { */ def genExceptionHandlers() { - /** Return a list of pairs of intervals where the handler is active. - * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end]. - * Preconditions: - * - e.covered non-empty - * Postconditions for the result: - * - always non-empty - * - intervals are sorted as per `linearization` - * - the argument's `covered` blocks have been grouped into maximally contiguous intervals, - * ie. between any two intervals in the result there is a non-empty gap. - * - each of the `covered` blocks in the argument is contained in some interval in the result + /* Return a list of pairs of intervals where the handler is active. + * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end]. + * Preconditions: + * - e.covered non-empty + * Postconditions for the result: + * - always non-empty + * - intervals are sorted as per `linearization` + * - the argument's `covered` blocks have been grouped into maximally contiguous intervals, + * ie. between any two intervals in the result there is a non-empty gap. + * - each of the `covered` blocks in the argument is contained in some interval in the result */ def intervals(e: ExceptionHandler): List[BlockInteval] = { assert(e.covered.nonEmpty, e) @@ -2460,7 +2460,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case icodes.mthdsCat => def genMethodsInstr() = (instr: @unchecked) match { - /** Special handling to access native Array.clone() */ + /* Special handling to access native Array.clone() */ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) => val target: String = javaType(call.targetTypeKind).getInternalName jcode.invokevirtual(target, "clone", mdesc_arrayClone) @@ -2610,7 +2610,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - /** + /* * Emits one or more conversion instructions based on the types given as arguments. * * @param from The type of the value to be converted into another type. 
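
The comment closing the previous hunk describes emitting conversion instructions between primitive kinds. For a concrete feel, here is a stand-alone sketch of the kind of opcode selection involved, using the unshaded org.objectweb.asm Opcodes constants for illustration (only a sample of widening pairs, keyed by JVM descriptor characters; the real method covers every TypeKind pair, including multi-instruction paths):

import org.objectweb.asm.Opcodes

object ConversionSketch {
  // I = int, J = long, F = float, D = double. Narrowing conversions and
  // kinds smaller than int need extra or different instructions.
  def wideningOpcode(from: Char, to: Char): Option[Int] = (from, to) match {
    case ('I', 'J') => Some(Opcodes.I2L)
    case ('I', 'F') => Some(Opcodes.I2F)
    case ('I', 'D') => Some(Opcodes.I2D)
    case ('J', 'F') => Some(Opcodes.L2F)
    case ('J', 'D') => Some(Opcodes.L2D)
    case ('F', 'D') => Some(Opcodes.F2D)
    case _          => None
  }

  def main(args: Array[String]): Unit =
    println(wideningOpcode('I', 'J')) // Some(133), i.e. the i2l instruction
}
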
@@ -3171,7 +3171,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - /** + /* * Computes a mapping from jump only block to its * final destination which is either a non-jump-only * block or, if it's in a jump-only block cycle, is diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index b80acc2324..7187bacb06 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -423,9 +423,7 @@ abstract class ConstantOptimization extends SubComponent { in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) }) } - /** - * common code for interpreting CJUMP and CZJUMP - */ + /* common code for interpreting CJUMP and CZJUMP */ def interpretConditional(kind: TypeKind, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = { // TODO use reaching analysis to update the state in the two branches // e.g. if the comparison was checking null equality on local x diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 1a73764719..38040d921f 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -342,7 +342,7 @@ abstract class Inliners extends SubComponent { inlineWithoutTFA(inputBlocks, callsites) } - /** + /* * Inline straightforward callsites (those that can be inlined without a TFA). * * To perform inlining, all we need to know is listed as formal params in `analyzeInc()`: @@ -372,7 +372,7 @@ abstract class Inliners extends SubComponent { inlineCount } - /** + /* * Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod` * at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed. * @@ -788,7 +788,7 @@ abstract class Inliners extends SubComponent { val varsInScope = mutable.HashSet[Local]() ++= block.varsInScope - /** Side effects varsInScope when it sees SCOPE_ENTERs. */ + /* Side effects varsInScope when it sees SCOPE_ENTERs. */ def instrBeforeFilter(i: Instruction): Boolean = { i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () } i ne instr @@ -801,7 +801,7 @@ abstract class Inliners extends SubComponent { // store the '$this' into the special local val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass)) - /** buffer for the returned value */ + /* buffer for the returned value */ val retVal = inc.m.returnType match { case UNIT => null case x => newLocal("$retVal", x) @@ -809,7 +809,7 @@ abstract class Inliners extends SubComponent { val inlinedLocals = mutable.HashMap.empty[Local, Local] - /** Add a new block in the current context. */ + /* Add a new block in the current context. */ def newBlock() = { val b = caller.m.code.newBlock() activeHandlers foreach (_ addCoveredBlock b) @@ -826,7 +826,7 @@ abstract class Inliners extends SubComponent { handler } - /** alfa-rename `l` in caller's context. */ + /* alfa-rename `l` in caller's context. 
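
dupLocal, whose comment closes just below, gives each local of the inlined callee a fresh counterpart in the caller so that names cannot clash after inlining. A much-simplified sketch of that bookkeeping (the Local type and naming scheme here are illustrative; the real code also carries over position and type information):

object InlineRenameSketch {
  final case class Local(name: String)

  final class Renamer {
    private var counter = 0
    private val inlinedLocals = scala.collection.mutable.HashMap.empty[Local, Local]

    private def freshName(base: String): String = { counter += 1; s"$base$$inl$counter" }

    // Simplification: create the renamed local on first use and reuse it afterwards.
    def dupLocal(l: Local): Local =
      inlinedLocals.getOrElseUpdate(l, Local(freshName(l.name)))
  }

  def main(args: Array[String]): Unit = {
    val r = new Renamer
    val x = Local("x")
    println(r.dupLocal(x))                   // Local(x$inl1)
    println(r.dupLocal(x) eq r.dupLocal(x))  // true: the mapping is stable
  }
}
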
*/ def dupLocal(l: Local): Local = { val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos) // sym.setInfo(l.sym.tpe) @@ -837,10 +837,10 @@ abstract class Inliners extends SubComponent { val afterBlock = newBlock() - /** Map from nw.init instructions to their matching NEW call */ + /* Map from nw.init instructions to their matching NEW call */ val pending: mutable.Map[Instruction, NEW] = new mutable.HashMap - /** Map an instruction from the callee to one suitable for the caller. */ + /* Map an instruction from the callee to one suitable for the caller. */ def map(i: Instruction): Instruction = { def assertLocal(l: Local) = { assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 00e5875852..71b97e86a6 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -79,11 +79,11 @@ trait Plugins { val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet) - /** Verify requirements are present. */ + /* Verify requirements are present. */ for (req <- settings.require.value ; if !(plugs exists (_.name == req))) globalError("Missing required plugin: " + req) - /** Process plugin options. */ + /* Process plugin options. */ def namec(plug: Plugin) = plug.name + ":" def optList(xs: List[String], p: Plugin) = xs filter (_ startsWith namec(p)) def doOpts(p: Plugin): List[String] = @@ -95,7 +95,7 @@ trait Plugins { p.processOptions(opts, globalError) } - /** Verify no non-existent plugin given with -P */ + /* Verify no non-existent plugin given with -P */ for (opt <- settings.pluginOptions.value ; if plugs forall (p => optList(List(opt), p).isEmpty)) globalError("bad option: -P:" + opt) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f8930c4ddd..d26a61f187 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -503,8 +503,8 @@ abstract class ClassfileParser { val nameIdx = in.nextChar currentClass = pool.getClassName(nameIdx) - /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. - * Updates the read pointer of 'in'. */ + /* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. + * Updates the read pointer of 'in'. */ def parseParents: List[Type] = { if (isScala) { in.nextChar // skip superclass @@ -984,8 +984,8 @@ abstract class ClassfileParser { Some(ScalaSigBytes(pool.getBytes(entries.toList))) } - /** Parse and return a single annotation. If it is malformed, - * return None. + /* Parse and return a single annotation. If it is malformed, + * return None. */ def parseAnnotation(attrNameIndex: Char): Option[AnnotationInfo] = try { val attrType = pool.getType(attrNameIndex) @@ -1030,7 +1030,7 @@ abstract class ClassfileParser { None // ignore malformed annotations } - /** + /* * Parse the "Exceptions" attribute which denotes the exceptions * thrown by a method. */ @@ -1046,8 +1046,8 @@ abstract class ClassfileParser { } } - /** Parse a sequence of annotations and attaches them to the - * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. 
*/ + /* Parse a sequence of annotations and attaches them to the + * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */ def parseAnnotations(len: Int): Option[AnnotationInfo] = { val nAttr = in.nextChar var scalaSigAnnot: Option[AnnotationInfo] = None @@ -1173,7 +1173,7 @@ abstract class ClassfileParser { * If the given name is not an inner class, it returns the symbol found in `definitions`. */ def classSymbol(externalName: Name): Symbol = { - /** Return the symbol of `innerName`, having the given `externalName`. */ + /* Return the symbol of `innerName`, having the given `externalName`. */ def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = { def getMember(sym: Symbol, name: Name): Symbol = if (static) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 7010c9e20a..86f034223d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -197,7 +197,7 @@ abstract class ICodeReader extends ClassfileParser { import code._ var size = 1 // instruction size - /** Parse 16 bit jump target. */ + /* Parse 16 bit jump target. */ def parseJumpTarget = { size += 2 val offset = in.nextChar.toShort @@ -206,7 +206,7 @@ abstract class ICodeReader extends ClassfileParser { target } - /** Parse 32 bit jump target. */ + /* Parse 32 bit jump target. */ def parseJumpTargetW: Int = { size += 4 val offset = in.nextInt diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 9b33ae8ba1..9217bbeeb8 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -222,7 +222,7 @@ abstract class Pickler extends SubComponent { case NullaryMethodType(restpe) => putType(restpe) case PolyType(tparams, restpe) => - /** no longer needed since all params are now local + /* no longer needed since all params are now local tparams foreach { tparam => if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local } diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 5fbc15f858..9c77c3583b 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -174,8 +174,8 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => override def complete(implSym: Symbol) { debuglog("LazyImplClassType completing " + implSym) - /** If `tp` refers to a non-interface trait, return a - * reference to its implementation class. Otherwise return `tp`. + /* If `tp` refers to a non-interface trait, return a + * reference to its implementation class. Otherwise return `tp`. 
*/ def mixinToImplClass(tp: Type): Type = AddInterfaces.this.erasure(implSym) { tp match { //@MATN: no normalize needed (comes after erasure) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index a871c72fc2..3b9cee2d88 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -351,7 +351,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { else if (resultSym == ObjectClass) tree // no cast necessary else gen.mkCast(tree, boxedResType) // cast to expected type - /** Normal non-Array call */ + /* Normal non-Array call */ def genDefaultCall = { // reflective method call machinery val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...) @@ -369,7 +369,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY) } - /** A possible primitive method call, represented by methods in BoxesRunTime. */ + /* A possible primitive method call, represented by methods in BoxesRunTime. */ def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args) def genValueCallWithTest = { getPrimitiveReplacementForStructuralCall(methSym.name) match { @@ -380,7 +380,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { } } - /** A native Array call. */ + /* A native Array call. */ def genArrayCall = fixResult( methSym.name match { case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args) @@ -391,9 +391,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL { mustBeUnit = methSym.name == nme.update ) - /** A conditional Array call, when we can't determine statically if the argument is - * an Array, but the structural type method signature is consistent with an Array method - * so we have to generate both kinds of code. + /* A conditional Array call, when we can't determine statically if the argument is + * an Array, but the structural type method signature is consistent with an Array method + * so we have to generate both kinds of code. */ def genArrayCallWithTest = IF ((qual1() GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 886c790ec0..a560afe3c0 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -303,10 +303,10 @@ abstract class Constructors extends Transform with ast.TreeDSL { copyParam(acc, parameter(acc)) } - /** Return a single list of statements, merging the generic class constructor with the - * specialized stats. The original statements are retyped in the current class, and - * assignments to generic fields that have a corresponding specialized assignment in - * `specializedStats` are replaced by the specialized assignment. + /* Return a single list of statements, merging the generic class constructor with the + * specialized stats. The original statements are retyped in the current class, and + * assignments to generic fields that have a corresponding specialized assignment in + * `specializedStats` are replaced by the specialized assignment. 
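
genArrayCallWithTest, in the CleanUp hunk earlier in this diff, has to emit both paths because the static type of a structural-call receiver does not say whether it is an array at run time. In source terms the generated dispatch is roughly the following (hand-written illustration using plain Java reflection; the real code goes through the compiler's own array and reflection helpers):

object StructuralArrayDispatchSketch {
  // Roughly the shape of the IF (qual.getClass.isArray) ... ELSE ... tree,
  // for a structural call to `length`.
  def structuralLength(receiver: AnyRef): Int =
    if (receiver.getClass.isArray)
      java.lang.reflect.Array.getLength(receiver)            // genArrayCall path
    else {
      val m = receiver.getClass.getMethod("length")          // genDefaultCall path
      m.invoke(receiver).asInstanceOf[Int]
    }

  def main(args: Array[String]): Unit = {
    println(structuralLength(Array(1, 2, 3))) // 3, via the array path
    println(structuralLength("abcd"))         // 4, via reflection
  }
}
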
*/ def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = { val specBuf = new ListBuffer[Tree] @@ -321,10 +321,10 @@ abstract class Constructors extends Transform with ast.TreeDSL { case _ => false } - /** Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array. - * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type - * variable, but after specialization this is a concrete primitive type, so it would - * be an error to pass it to array_update(.., .., Object). + /* Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array. + * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type + * variable, but after specialization this is a concrete primitive type, so it would + * be an error to pass it to array_update(.., .., Object). */ def rewriteArrayUpdate(tree: Tree): Tree = { val adapter = new Transformer { @@ -373,14 +373,14 @@ abstract class Constructors extends Transform with ast.TreeDSL { res } - /** Add an 'if' around the statements coming after the super constructor. This - * guard is necessary if the code uses specialized fields. A specialized field is - * initialized in the subclass constructor, but the accessors are (already) overridden - * and pointing to the (empty) fields. To fix this, a class with specialized fields - * will not run its constructor statements if the instance is specialized. The specialized - * subclass includes a copy of those constructor statements, and runs them. To flag that a class - * has specialized fields, and their initialization should be deferred to the subclass, method - * 'specInstance$' is added in phase specialize. + /* Add an 'if' around the statements coming after the super constructor. This + * guard is necessary if the code uses specialized fields. A specialized field is + * initialized in the subclass constructor, but the accessors are (already) overridden + * and pointing to the (empty) fields. To fix this, a class with specialized fields + * will not run its constructor statements if the instance is specialized. The specialized + * subclass includes a copy of those constructor statements, and runs them. To flag that a class + * has specialized fields, and their initialization should be deferred to the subclass, method + * 'specInstance$' is added in phase specialize. 
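
The guard described in the comment just above means a class with specialized fields skips its own field initialization when the instance is really a specialized subclass, which re-runs a copy of those statements against its specialized fields. A hand-written model of the resulting shape (specInstance$, Box and Box$mcI are illustrative; the real flag and subclass are synthesized during specialization):

object SpecializedGuardSketch {
  class Box[T](init: T) {
    protected def specInstance$ : Boolean = false // synthesized flag in the real scheme
    var value: T = _
    // Post-super constructor statements are wrapped in the guard:
    if (!specInstance$) { value = init }
  }

  // The specialized subclass answers true and runs its own copy of the
  // initializer against the specialized field.
  class Box$mcI(init: Int) extends Box[Int](init) {
    override protected def specInstance$ : Boolean = true
    var value$mcI: Int = init
  }

  def main(args: Array[String]): Unit = {
    val b = new Box$mcI(42)
    println(b.value$mcI) // 42
    println(b.value)     // 0: the generic initializer was skipped by the guard
  }
}
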
*/ def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else { // split the statements in presuper and postsuper @@ -425,8 +425,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { } */ - /** Create a getter or a setter and enter into `clazz` scope - */ + /* Create a getter or a setter and enter into `clazz` scope */ def addAccessor(sym: Symbol, name: TermName, flags: Long) = { val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz clazz.info.decls enter m @@ -555,7 +554,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz)))) } - /** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */ + /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */ def splitAtSuper(stats: List[Tree]) = { def isConstr(tree: Tree) = (tree.symbol ne null) && tree.symbol.isConstructor val (pre, rest0) = stats span (!isConstr(_)) @@ -566,12 +565,12 @@ abstract class Constructors extends Transform with ast.TreeDSL { val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList) var remainingConstrStats = remainingConstrStats0 - /** XXX This is not corect: remainingConstrStats.nonEmpty excludes too much, - * but excluding it includes too much. The constructor sequence being mimicked - * needs to be reproduced with total fidelity. + /* XXX This is not corect: remainingConstrStats.nonEmpty excludes too much, + * but excluding it includes too much. The constructor sequence being mimicked + * needs to be reproduced with total fidelity. * - * See test case files/run/bug4680.scala, the output of which is wrong in many - * particulars. + * See test case files/run/bug4680.scala, the output of which is wrong in many + * particulars. */ val needsDelayedInit = (clazz isSubClass DelayedInitClass) /*&& !(defBuf exists isInitDef)*/ && remainingConstrStats.nonEmpty diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 55b9ce1be9..c8ee39b9d6 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -559,11 +559,11 @@ abstract class Erasure extends AddInterfaces case x => assert(x != ArrayClass) tree match { - /** Can't always remove a Box(Unbox(x)) combination because the process of boxing x - * may lead to throwing an exception. + /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x + * may lead to throwing an exception. * - * This is important for specialization: calls to the super constructor should not box/unbox specialized - * fields (see TupleX). (ID) + * This is important for specialization: calls to the super constructor should not box/unbox specialized + * fields (see TupleX). 
(ID) */ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) => log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}") diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala index 83dbc23014..1bbe1b8410 100644 --- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala @@ -4,8 +4,8 @@ package transform trait InlineErasure { self: Erasure => -/** +/* import global._ import definitions._ - **/ + */ } diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 60815da967..6ff8792a45 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -253,8 +253,7 @@ abstract class LambdaLift extends InfoTransform { } } - /** Rename a trait's interface and implementation class in coordinated fashion. - */ + /* Rename a trait's interface and implementation class in coordinated fashion. */ def renameTrait(traitSym: Symbol, implSym: Symbol) { val originalImplName = implSym.name renameSym(traitSym) @@ -457,7 +456,7 @@ abstract class LambdaLift extends InfoTransform { case arg => arg } - /** Wrap expr argument in new *Ref(..) constructor. But try/catch + /* Wrap expr argument in new *Ref(..) constructor. But try/catch * is a problem because a throw will clear the stack and post catch * we would expect the partially-constructed object to be on the stack * for the call to init. So we recursively diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index f7e3310f88..e0b30ab9f9 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -201,7 +201,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { treatedClassInfos(clazz) = clazz.info assert(phase == currentRun.mixinPhase, phase) - /** Create a new getter. Getters are never private or local. They are + /* Create a new getter. Getters are never private or local. They are * always accessors and deferred. */ def newGetter(field: Symbol): Symbol = { // println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE)) @@ -210,8 +210,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { clazz.newMethod(nme.getterName(field.name.toTermName), field.pos, newFlags) setInfo MethodType(Nil, field.info) } - /** Create a new setter. Setters are never private or local. They are - * always accessors and deferred. */ + /* Create a new setter. Setters are never private or local. They are + * always accessors and deferred. 
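
The newGetter comment above and newSetter just below create exactly the accessor pair that a trait field needs in the interface; the concrete field only materializes in the class the trait is mixed into. Written out by hand, the expansion of a trait var looks roughly like this (names are illustrative):

object TraitFieldSketch {
  // What the programmer writes:  trait Counter { var count: Int = 0 }
  // What mixin needs: deferred accessors in the interface, a field plus
  // concrete accessors in each class that mixes the trait in.
  trait CounterLike {
    def count: Int             // newGetter: accessor, deferred
    def count_=(n: Int): Unit  // newSetter: accessor, deferred
  }

  class CounterImpl extends CounterLike {
    private[this] var count0: Int = 0   // the field lives in the class
    def count: Int = count0
    def count_=(n: Int): Unit = count0 = n
  }

  def main(args: Array[String]): Unit = {
    val c = new CounterImpl
    c.count = 3
    println(c.count) // 3
  }
}
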
*/ def newSetter(field: Symbol): Symbol = { //println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE)) val setterName = nme.getterToSetter(nme.getterName(field.name.toTermName)) @@ -264,7 +264,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { resetFlag DEFERRED | lateDEFERRED ) - /** Mix in members of implementation class mixinClass into class clazz */ + /* Mix in members of implementation class mixinClass into class clazz */ def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) { if (!mixinClass.isImplClass) debugwarn ("Impl class flag is not set " + ((mixinClass.debugLocationString, mixinInterface.debugLocationString))) @@ -280,9 +280,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } } - /** Mix in members of trait mixinClass into class clazz. Also, - * for each lazy field in mixinClass, add a link from its mixed in member to its - * initializer method inside the implclass. + /* Mix in members of trait mixinClass into class clazz. Also, + * for each lazy field in mixinClass, add a link from its mixed in member to its + * initializer method inside the implclass. */ def mixinTraitMembers(mixinClass: Symbol) { // For all members of a trait's interface do: @@ -649,34 +649,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = { val newDefs = mutable.ListBuffer[Tree]() - /** Attribute given tree and anchor at given position */ + /* Attribute given tree and anchor at given position */ def attributedDef(pos: Position, tree: Tree): Tree = { debuglog("add new def to " + clazz + ": " + tree) typedPos(pos)(tree) } - /** The position of given symbol, or, if this is undefined, - * the position of the current class. + /* The position of given symbol, or, if this is undefined, + * the position of the current class. */ def position(sym: Symbol) = if (sym.pos == NoPosition) clazz.pos else sym.pos - /** Add tree at given position as new definition */ + /* Add tree at given position as new definition */ def addDef(pos: Position, tree: Tree) { newDefs += attributedDef(pos, tree) } - /** Add new method definition. + /* Add new method definition. * - * @param sym The method symbol. - * @param rhs The method body. + * @param sym The method symbol. + * @param rhs The method body. */ def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), DefDef(sym, rhs)) def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs)) - /** Add `newdefs` to `stats`, removing any abstract method definitions - * in `stats` that are matched by some symbol defined in - * `newDefs`. + /* Add `newdefs` to `stats`, removing any abstract method definitions + * in `stats` that are matched by some symbol defined in + * `newDefs`. */ def add(stats: List[Tree], newDefs: List[Tree]) = { val newSyms = newDefs map (_.symbol) @@ -692,12 +692,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else newDefs ::: (stats filter isNotDuplicate) } - /** If `stat` is a superaccessor, complete it by adding a right-hand side. - * Note: superaccessors are always abstract until this point. - * The method to call in a superaccessor is stored in the accessor symbol's alias field. - * The rhs is: - * super.A(xs) where A is the super accessor's alias and xs are its formal parameters. - * This rhs is typed and then mixin transformed. + /* If `stat` is a superaccessor, complete it by adding a right-hand side. 
+ * Note: superaccessors are always abstract until this point. + * The method to call in a superaccessor is stored in the accessor symbol's alias field. + * The rhs is: + * super.A(xs) where A is the super accessor's alias and xs are its formal parameters. + * This rhs is typed and then mixin transformed. */ def completeSuperAccessor(stat: Tree) = stat match { case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor => @@ -709,7 +709,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { stat } - /** + /* * Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse * the bitmap of its parents. If that does not exist yet we create one. */ @@ -751,7 +751,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset) } - /** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */ + /* Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */ def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = { val bmp = bitmapFor(clazz, offset, valSym) def mask = maskForOffset(offset, valSym, kind) @@ -761,8 +761,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { x === newValue } - /** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the - * precise comparison operator depending on the value of 'equalToZero'. + /* Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the + * precise comparison operator depending on the value of 'equalToZero'. */ def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = { val bitmapTree = (This(clazz) DOT bitmapSym) @@ -800,17 +800,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } - /** Always copy the tree if we are going to perform sym substitution, - * otherwise we will side-effect on the tree that is used in the fast path - */ - class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) { - override def transform(tree: Tree): Tree = - if (tree.hasSymbolField && from.contains(tree.symbol)) - super.transform(tree.duplicate) - else super.transform(tree.duplicate) + /* Always copy the tree if we are going to perform sym substitution, + * otherwise we will side-effect on the tree that is used in the fast path + */ + class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) { + override def transform(tree: Tree): Tree = + if (tree.hasSymbolField && from.contains(tree.symbol)) + super.transform(tree.duplicate) + else super.transform(tree.duplicate) - override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree) - } + override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree) + } /** return a 'lazified' version of rhs. It uses double-checked locking to ensure * initialization is performed at most once. For performance reasons the double-checked @@ -889,11 +889,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { typedPos(pos)(BLOCK(result, retVal)) } - /** Complete lazy field accessors. Applies only to classes, - * for it's own (non inherited) lazy fields. If 'checkinit' - * is enabled, getters that check for the initialized bit are - * generated, and the class constructor is changed to set the - * initialized bits. 
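
The 'lazified' rhs described just above (mkLazyDef) relies on a bitmap plus double-checked locking so the initializer runs at most once. A hand-written version of the same shape for a single lazy field (field and bitmap names are illustrative; the compiler packs several lazy fields into one bitmap and chooses its width per class):

object LazyValSketch {
  class Holder {
    @volatile private[this] var bitmap$0: Boolean = false
    private[this] var value$lzy: String = _

    private def compute(): String = { println("computing once"); "result" }

    // Double-checked locking: only the slow path takes the monitor, and the
    // bit is set only after the field has been written.
    def value: String = {
      if (!bitmap$0) {
        this.synchronized {
          if (!bitmap$0) {
            value$lzy = compute()
            bitmap$0 = true
          }
        }
      }
      value$lzy
    }
  }

  def main(args: Array[String]): Unit = {
    val h = new Holder
    println(h.value)
    println(h.value) // "computing once" is printed a single time
  }
}
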
+ /* Complete lazy field accessors. Applies only to classes, + * for it's own (non inherited) lazy fields. If 'checkinit' + * is enabled, getters that check for the initialized bit are + * generated, and the class constructor is changed to set the + * initialized bits. */ def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = { def dd(stat: DefDef) = { @@ -974,17 +974,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } } - /** Adds statements to set the 'init' bit for each field initialized - * in the body of a constructor. + /* Adds statements to set the 'init' bit for each field initialized + * in the body of a constructor. */ def addInitBits(clazz: Symbol, rhs: Tree): Tree = new AddInitBitsTransformer(clazz) transform rhs // begin addNewDefs - /** Fill the map from fields to offset numbers. - * Instead of field symbols, the map keeps their getter symbols. This makes - * code generation easier later. + /* Fill the map from fields to offset numbers. + * Instead of field symbols, the map keeps their getter symbols. This makes + * code generation easier later. */ def buildBitmapOffsets() { def fold(fields: List[Symbol], category: Name) = { diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala index 2a86d711f1..96263f3c0c 100644 --- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala @@ -33,11 +33,11 @@ trait PostErasure extends InfoTransform with TypingTransformers { override def transform(tree: Tree) = { def finish(res: Tree) = logResult(s"Posterasure reduction\n Old: $tree\n New")(res) - /** We use the name of the operation being performed and not the symbol - * itself because the symbol hails from the boxed class, and this transformation - * exists to operate directly on the values. So we are for instance looking - * up == on an lhs of type Int, whereas the symbol which has been passed in - * is from java.lang.Integer. + /* We use the name of the operation being performed and not the symbol + * itself because the symbol hails from the boxed class, and this transformation + * exists to operate directly on the values. So we are for instance looking + * up == on an lhs of type Int, whereas the symbol which has been passed in + * is from java.lang.Integer. */ def binop(lhs: Tree, op: Symbol, rhs: Tree) = finish(localTyper typed (Apply(Select(lhs, op.name) setPos tree.pos, rhs :: Nil) setPos tree.pos)) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0cd7f516ef..9e0570ba99 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -518,9 +518,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = { def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = { - /** It gets hard to follow all the clazz and cls, and specializedClass - * was both already used for a map and mucho long. So "sClass" is the - * specialized subclass of "clazz" throughout this file. + /* It gets hard to follow all the clazz and cls, and specializedClass + * was both already used for a map and mucho long. So "sClass" is the + * specialized subclass of "clazz" throughout this file. 
*/ // SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is @@ -558,12 +558,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def applyContext(tpe: Type) = subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe)) - /** Return a list of specialized parents to be re-mixed in a specialized subclass. - * Assuming env = [T -> Int] and - * class Integral[@specialized T] extends Numeric[T] - * and Numeric[U] is specialized on U, this produces List(Numeric$mcI). + /* Return a list of specialized parents to be re-mixed in a specialized subclass. + * Assuming env = [T -> Int] and + * class Integral[@specialized T] extends Numeric[T] + * and Numeric[U] is specialized on U, this produces List(Numeric$mcI). * - * so that class Integral$mci extends Integral[Int] with Numeric$mcI. + * so that class Integral$mci extends Integral[Int] with Numeric$mcI. */ def specializedParents(parents: List[Type]): List[Type] = { var res: List[Type] = Nil @@ -604,10 +604,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { exitingSpecialize(sClass setInfo specializedInfoType) val fullEnv = outerEnv ++ env - /** Enter 'sym' in the scope of the current specialized class. It's type is - * mapped through the active environment, binding type variables to concrete - * types. The existing typeEnv for `sym` is composed with the current active - * environment + /* Enter 'sym' in the scope of the current specialized class. It's type is + * mapped through the active environment, binding type variables to concrete + * types. The existing typeEnv for `sym` is composed with the current active + * environment */ def enterMember(sym: Symbol): Symbol = { typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment @@ -620,18 +620,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { decls1 enter subst(fullEnv)(sym) } - /** Create and enter in scope an overridden symbol m1 for `m` that forwards - * to `om`. `om` is a fresh, special overload of m1 that is an implementation - * of `m`. For example, for a + /* Create and enter in scope an overridden symbol m1 for `m` that forwards + * to `om`. `om` is a fresh, special overload of m1 that is an implementation + * of `m`. For example, for a * - * class Foo[@specialized A] { - * def m(x: A) = // m - * } - * , for class Foo$I extends Foo[Int], this method enters two new symbols in - * the scope of Foo$I: + * class Foo[@specialized A] { + * def m(x: A) = // m + * } + * , for class Foo$I extends Foo[Int], this method enters two new symbols in + * the scope of Foo$I: * - * def m(x: Int) = m$I(x) // m1 - * def m$I(x: Int) = /adapted to env {A -> Int} // om + * def m(x: Int) = m$I(x) // m1 + * def m$I(x: Int) = /adapted to env {A -> Int} // om */ def forwardToOverload(m: Symbol): Symbol = { val specMember = enterMember(cloneInSpecializedClass(m, f => (f | OVERRIDE) & ~(DEFERRED | CASEACCESSOR))) @@ -935,13 +935,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * this method will return List('apply$mcII$sp') */ private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) { - /** Return the overridden symbol in syms that needs a specialized overriding symbol, - * together with its specialization environment. The overridden symbol may not be - * the closest to 'overriding', in a given hierarchy. 
+ /* Return the overridden symbol in syms that needs a specialized overriding symbol, + * together with its specialization environment. The overridden symbol may not be + * the closest to 'overriding', in a given hierarchy. * - * An method m needs a special override if - * * m overrides a method whose type contains specialized type variables - * * there is a valid specialization environment that maps the overridden method type to m's type. + * An method m needs a special override if + * * m overrides a method whose type contains specialized type variables + * * there is a valid specialization environment that maps the overridden method type to m's type. */ def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = { def checkOverriddenTParams(overridden: Symbol) { @@ -1797,11 +1797,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = { log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)") - /** A constructor parameter `f` initializes a specialized field - * iff: - * - it is specialized itself - * - there is a getter for the original (non-specialized) field in the same class - * - there is a getter for the specialized field in the same class + /* A constructor parameter `f` initializes a specialized field + * iff: + * - it is specialized itself + * - there is a getter for the original (non-specialized) field in the same class + * - there is a getter for the specialized field in the same class */ def initializesSpecializedField(f: Symbol) = ( (f.name endsWith nme.SPECIALIZED_SUFFIX) diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 2418698a18..92ed7fc555 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -134,9 +134,9 @@ abstract class TailCalls extends Transform { this.tailPos = true this.failPos = dd.pos - /** Create a new method symbol for the current method and store it in - * the label field. - */ + /* Create a new method symbol for the current method and store it in + * the label field. + */ this.label = { val label = method.newLabel(newTermName("_" + method.name), method.pos) val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis) @@ -186,8 +186,7 @@ abstract class TailCalls extends Transform { } override def transform(tree: Tree): Tree = { - /** A possibly polymorphic apply to be considered for tail call transformation. - */ + /* A possibly polymorphic apply to be considered for tail call transformation. */ def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = { val receiver: Tree = fun match { case Select(qual, _) => qual @@ -200,8 +199,8 @@ abstract class TailCalls extends Transform { def transformArgs = noTailTransforms(args) def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol)) - /** Records failure reason in Context for reporting. - * Position is unchanged (by default, the method definition.) + /* Records failure reason in Context for reporting. + * Position is unchanged (by default, the method definition.) 
*/ def fail(reason: String) = { debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason) @@ -209,8 +208,7 @@ abstract class TailCalls extends Transform { ctx.failReason = reason treeCopy.Apply(tree, noTailTransform(target), transformArgs) } - /** Position of failure is that of the tree being considered. - */ + /* Position of failure is that of the tree being considered. */ def failHere(reason: String) = { ctx.failPos = fun.pos fail(reason) @@ -264,8 +262,8 @@ abstract class TailCalls extends Transform { deriveDefDef(tree){rhs => if (newCtx.isTransformed) { - /** We have rewritten the tree, but there may be nested recursive calls remaining. - * If @tailrec is given we need to fail those now. + /* We have rewritten the tree, but there may be nested recursive calls remaining. + * If @tailrec is given we need to fail those now. */ if (newCtx.isMandatory) { for (t @ Apply(fn, _) <- newRHS ; if fn.symbol == newCtx.method) { diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 94ca1206b9..90f67d6300 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -401,8 +401,7 @@ abstract class UnCurry extends InfoTransform finally needTryLift = saved } - /** Transform tree `t` to { def f = t; f } where `f` is a fresh name - */ + /* Transform tree `t` to { def f = t; f } where `f` is a fresh name */ def liftTree(tree: Tree) = { debuglog("lifting tree at: " + (tree.pos)) val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 69d9987b05..47f8b14e49 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -366,7 +366,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1" */ lazy val implications = { - /** when we know V = C, which other equalities must hold + /* when we know V = C, which other equalities must hold * * in general, equality to some type implies equality to its supertypes * (this multi-valued kind of equality is necessary for unreachability) @@ -385,7 +385,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { // else debug.patmat("NOT implies: "+(lower, upper)) - /** does V = C preclude V having value `other`? + /* does V = C preclude V having value `other`? 
(1) V = null is an exclusive assignment, (2) V = A and V = B, for A and B value constants, are mutually exclusive unless A == B we err on the safe side, for example: @@ -623,4 +623,4 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { override def toString = "null" } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 23b33e9be6..9ebbc2fea4 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -309,7 +309,7 @@ trait MatchTranslation { self: PatternMatching => // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info)) translateExtractorPattern(ExtractorCall(unfun, args)) - /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0. + /* A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0. It consists of a stable identifier c, followed by element patterns p1, ..., pn. The constructor c is a simple or qualified name which denotes a case class (§5.3.2). @@ -328,22 +328,22 @@ trait MatchTranslation { self: PatternMatching => noFurtherSubPats() } - /** A typed pattern x : T consists of a pattern variable x and a type pattern T. - The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type. - This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value. - **/ + /* A typed pattern x : T consists of a pattern variable x and a type pattern T. + The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type. + This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value. + */ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type case MaybeBoundTyped(subPatBinder, pt) => val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize // a typed pattern never has any subtrees noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos)) - /** A pattern binder x@p consists of a pattern variable x and a pattern p. - The type of the variable x is the static type T of the pattern p. - This pattern matches any value v matched by the pattern p, - provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503 - and it binds the variable name to that value. - **/ + /* A pattern binder x@p consists of a pattern variable x and a pattern p. + The type of the variable x is the static type T of the pattern p. + This pattern matches any value v matched by the pattern p, + provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503 + and it binds the variable name to that value. + */ case Bound(subpatBinder, p) => // replace subpatBinder by patBinder (as if the Bind was not there) withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)), @@ -351,14 +351,14 @@ trait MatchTranslation { self: PatternMatching => (patBinder, p) ) - /** 8.1.4 Literal Patterns - A literal pattern L matches any value that is equal (in terms of ==) to the literal L. - The type of L must conform to the expected type of the pattern. 
+ /* 8.1.4 Literal Patterns + A literal pattern L matches any value that is equal (in terms of ==) to the literal L. + The type of L must conform to the expected type of the pattern. - 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1)) - The pattern matches any value v such that r == v (§12.1). - The type of r must conform to the expected type of the pattern. - **/ + 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1)) + The pattern matches any value v such that r == v (§12.1). + The type of r must conform to the expected type of the pattern. + */ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos)) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 0af75a2aad..dc48cac26c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -302,7 +302,7 @@ trait ContextErrors { val target = qual.tpe.widen def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else "" def nameString = decodeWithKind(name, owner) - /** Illuminating some common situations and errors a bit further. */ + /* Illuminating some common situations and errors a bit further. */ def addendum = { val companion = { if (name.isTermName && owner.isPackageClass) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 429bd7d682..0f85f8ee22 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -532,7 +532,7 @@ trait Contexts { self: Analyzer => case _ => false } - /** Is protected access to target symbol permitted */ + /* Is protected access to target symbol permitted */ def isProtectedAccessOK(target: Symbol) = { val c = enclosingSubClassContext(sym.owner) if (c == NoContext) diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index 80dfef6c7b..282dd8a99d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -56,8 +56,8 @@ trait EtaExpansion { self: Analyzer => } val defs = new ListBuffer[Tree] - /** Append to `defs` value definitions for all non-stable - * subexpressions of the function application `tree`. + /* Append to `defs` value definitions for all non-stable + * subexpressions of the function application `tree`. */ def liftoutPrefix(tree: Tree): Tree = { def liftout(tree: Tree, byName: Boolean): Tree = @@ -106,8 +106,7 @@ trait EtaExpansion { self: Analyzer => tree1 } - /** Eta-expand lifted tree. - */ + /* Eta-expand lifted tree. */ def expand(tree: Tree, tpe: Type): Tree = tpe match { case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit => val params: List[(ValDef, Boolean)] = paramSyms.map { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 5b11adf127..1397513058 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -873,8 +873,8 @@ trait Implicits { } if (best.isFailure) { - /** If there is no winner, and we witnessed and caught divergence, - * now we can throw it for the error message. 
+ /* If there is no winner, and we witnessed and caught divergence, + * now we can throw it for the error message. */ if (divergence) throw DivergentImplicit @@ -934,8 +934,8 @@ trait Implicits { */ private def companionImplicitMap(tp: Type): InfoMap = { - /** Populate implicit info map by traversing all parts of type `tp`. - * Parameters as for `getParts`. + /* Populate implicit info map by traversing all parts of type `tp`. + * Parameters as for `getParts`. */ def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) = tp match { case TypeRef(pre, sym, args) => @@ -967,13 +967,13 @@ trait Implicits { } } - /** Populate implicit info map by traversing all parts of type `tp`. - * This method is performance critical. - * @param tp The type for which we want to traverse parts - * @param infoMap The infoMap in which implicit infos corresponding to parts are stored - * @param seen The types that were already visited previously when collecting parts for the given infoMap - * @param pending The set of static symbols for which we are currently trying to collect their parts - * in order to cache them in infoMapCache + /* Populate implicit info map by traversing all parts of type `tp`. + * This method is performance critical. + * @param tp The type for which we want to traverse parts + * @param infoMap The infoMap in which implicit infos corresponding to parts are stored + * @param seen The types that were already visited previously when collecting parts for the given infoMap + * @param pending The set of static symbols for which we are currently trying to collect their parts + * in order to cache them in infoMapCache */ def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) { if (seen(tp)) @@ -1136,7 +1136,7 @@ trait Implicits { val full = flavor == FullManifestClass val opt = flavor == OptManifestClass - /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */ + /* Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */ def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree = if (args contains EmptyTree) EmptyTree else typedPos(tree.pos.focus) { @@ -1145,12 +1145,12 @@ trait Implicits { mani } - /** Creates a tree representing one of the singleton manifests.*/ + /* Creates a tree representing one of the singleton manifests.*/ def findSingletonManifest(name: String) = typedPos(tree.pos.focus) { Select(gen.mkAttributedRef(FullManifestModule), name) } - /** Re-wraps a type in a manifest before calling inferImplicit on the result */ + /* Re-wraps a type in a manifest before calling inferImplicit on the result */ def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) = inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 9f16f65a6a..3924498628 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -304,12 +304,11 @@ trait Infer extends Checkable { def isPossiblyMissingArgs(found: Type, req: Type) = ( false - /** However it is that this condition is expected to imply - * "is possibly missing args", it is too weak. It is - * better to say nothing than to offer misleading guesses. 
+ /* However it is that this condition is expected to imply + * "is possibly missing args", it is too weak. It is + * better to say nothing than to offer misleading guesses. - (found.resultApprox ne found) - && isWeaklyCompatible(found.resultApprox, req) + * (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req) */ ) @@ -509,8 +508,8 @@ trait Infer extends Checkable { */ def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type, pt: Type): List[Type] = { - /** Map type variable to its instance, or, if `variance` is covariant/contravariant, - * to its upper/lower bound */ + /* Map type variable to its instance, or, if `variance` is covariant/contravariant, + * to its upper/lower bound */ def instantiateToBound(tvar: TypeVar, variance: Variance): Type = { lazy val hiBounds = tvar.constr.hiBounds lazy val loBounds = tvar.constr.loBounds @@ -1250,8 +1249,7 @@ trait Infer extends Checkable { debuglog("infer constr inst "+ tree +"/"+ undetparams +"/ pt= "+ pt +" pt0= "+ pt0 +" resTp: "+ resTp) - /** Compute type arguments for undetermined params - */ + /* Compute type arguments for undetermined params */ def inferFor(pt: Type): Option[List[Type]] = { val tvars = undetparams map freshVar val resTpV = resTp.instantiateTypeParams(undetparams, tvars) @@ -1384,9 +1382,9 @@ trait Infer extends Checkable { def ptMatchesPattp = pt matchesPattern pattp.widen def pattpMatchesPt = pattp matchesPattern pt - /** If we can absolutely rule out a match we can fail early. - * This is the case if the scrutinee has no unresolved type arguments - * and is a "final type", meaning final + invariant in all type parameters. + /* If we can absolutely rule out a match we can fail early. + * This is the case if the scrutinee has no unresolved type arguments + * and is a "final type", meaning final + invariant in all type parameters. */ if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) { IncompatibleScrutineeTypeError(tree0, pattp, pt) @@ -1422,9 +1420,9 @@ trait Infer extends Checkable { } tvars foreach instantiateTypeVar } - /** If the scrutinee has free type parameters but the pattern does not, - * we have to flip the arguments so the expected type is treated as more - * general when calculating the intersection. See run/bug2755.scala. + /* If the scrutinee has free type parameters but the pattern does not, + * we have to flip the arguments so the expected type is treated as more + * general when calculating the intersection. See run/bug2755.scala. */ if (tpparams.isEmpty && ptparams.nonEmpty) intersect(pattp, pt) else intersect(pt, pattp) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index d5da4967be..e966cc9060 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -554,8 +554,8 @@ trait Namers extends MethodSynthesis { val sym = copyDef.symbol val lazyType = completerOf(copyDef) - /** Assign the types of the class parameters to the parameters of the - * copy method. See comment in `Unapplies.caseClassCopyMeth` */ + /* Assign the types of the class parameters to the parameters of the + * copy method. 
See comment in `Unapplies.caseClassCopyMeth` */ def assignParamTypes() { val clazz = sym.owner val constructorType = clazz.primaryConstructor.tpe @@ -985,7 +985,7 @@ trait Namers extends MethodSynthesis { var vparamSymss = enterValueParams(vparamss) - /** + /* * Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type. * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter, * so the resulting type is a valid external method type, it does not contain (references to) skolems. @@ -1019,7 +1019,7 @@ trait Namers extends MethodSynthesis { res.substSym(tparamSkolems, tparamSyms) } - /** + /* * Creates a schematic method type which has WildcardTypes for non specified * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the * type schema is @@ -1043,7 +1043,7 @@ trait Namers extends MethodSynthesis { // def overriddenSymbol = meth.nextOverriddenSymbol - /** + /* * If `meth` doesn't have an explicit return type, extracts the return type from the method * overridden by `meth` (if there's an unique one). This type is lateron used as the expected * type for computing the type of the rhs. The resulting type references type skolems for @@ -1387,12 +1387,12 @@ trait Namers extends MethodSynthesis { */ def typeSig(tree: Tree): Type = { // log("typeSig " + tree) - /** For definitions, transform Annotation trees to AnnotationInfos, assign - * them to the sym's annotations. Type annotations: see Typer.typedAnnotated - * We have to parse definition annotations here (not in the typer when traversing - * the MemberDef tree): the typer looks at annotations of certain symbols; if - * they were added only in typer, depending on the compilation order, they may - * or may not be visible. + /* For definitions, transform Annotation trees to AnnotationInfos, assign + * them to the sym's annotations. Type annotations: see Typer.typedAnnotated + * We have to parse definition annotations here (not in the typer when traversing + * the MemberDef tree): the typer looks at annotations of certain symbols; if + * they were added only in typer, depending on the compilation order, they may + * or may not be visible. */ def annotate(annotated: Symbol) = { // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index ce8e0ed37b..d5ecb687b0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -111,7 +111,7 @@ trait NamesDefaults { self: Analyzer => val context = typer.context import context.unit - /** + /* * Transform a function into a block, and passing context.namedApplyBlockInfo to * the new block as side-effect. * @@ -256,7 +256,7 @@ trait NamesDefaults { self: Analyzer => } } - /** + /* * For each argument (arg: T), create a local value * x$n: T = arg * diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b32fc6b977..a1e422a7b0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -281,8 +281,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans else "") } - /** Check that all conditions for overriding `other` by `member` - * of class `clazz` are met. 
+ /* Check that all conditions for overriding `other` by `member` + * of class `clazz` are met. */ def checkOverride(member: Symbol, other: Symbol) { debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString)) @@ -361,8 +361,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } } - /** Is the intersection between given two lists of overridden symbols empty? - */ + /* Is the intersection between given two lists of overridden symbols empty? */ def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) = !(syms1 exists (syms2 contains _)) @@ -736,9 +735,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } } - /** Returns whether there is a symbol declared in class `inclazz` - * (which must be different from `clazz`) whose name and type - * seen as a member of `class.thisType` matches `member`'s. + /* Returns whether there is a symbol declared in class `inclazz` + * (which must be different from `clazz`) whose name and type + * seen as a member of `class.thisType` matches `member`'s. */ def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = { val isVarargs = hasRepeatedParam(member.tpe) @@ -750,22 +749,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans matches(member.tpe) || (isVarargs && matches(varargsType)) } - /** The rules for accessing members which have an access boundary are more - * restrictive in java than scala. Since java has no concept of package nesting, - * a member with "default" (package-level) access can only be accessed by members - * in the exact same package. Example: + /* The rules for accessing members which have an access boundary are more + * restrictive in java than scala. Since java has no concept of package nesting, + * a member with "default" (package-level) access can only be accessed by members + * in the exact same package. Example: * - * package a.b; - * public class JavaClass { void foo() { } } + * package a.b; + * public class JavaClass { void foo() { } } * - * The member foo() can be accessed only from members of package a.b, and not - * nested packages like a.b.c. In the analogous scala class: + * The member foo() can be accessed only from members of package a.b, and not + * nested packages like a.b.c. In the analogous scala class: * - * package a.b - * class ScalaClass { private[b] def foo() = () } + * package a.b + * class ScalaClass { private[b] def foo() = () } * - * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic - * is restricting the set of matching signatures according to the above semantics. + * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic + * is restricting the set of matching signatures according to the above semantics. */ def javaAccessCheck(sym: Symbol) = ( !inclazz.isJavaDefined // not a java defined member @@ -812,7 +811,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans for (i <- 0 until seenTypes.length) seenTypes(i) = Nil - /** validate all base types of a class in reverse linear order. */ + /* validate all base types of a class in reverse linear order. 
*/ def register(tp: Type): Unit = { // if (clazz.fullName.endsWith("Collection.Projection")) // println("validate base type "+tp) @@ -948,7 +947,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol` def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen) - /** Symbols which limit the warnings we can issue since they may be value types */ + /* Symbols which limit the warnings we can issue since they may be value types */ val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass) // Whether def equals(other: Any) has known behavior: it is the default @@ -1455,11 +1454,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans val Select(qual, _) = tree val sym = tree.symbol - /** Note: if a symbol has both @deprecated and @migration annotations and both - * warnings are enabled, only the first one checked here will be emitted. - * I assume that's a consequence of some code trying to avoid noise by suppressing - * warnings after the first, but I think it'd be better if we didn't have to - * arbitrarily choose one as more important than the other. + /* Note: if a symbol has both @deprecated and @migration annotations and both + * warnings are enabled, only the first one checked here will be emitted. + * I assume that's a consequence of some code trying to avoid noise by suppressing + * warnings after the first, but I think it'd be better if we didn't have to + * arbitrarily choose one as more important than the other. */ checkDeprecated(sym, tree.pos) if(settings.Xmigration.value != NoScalaVersion) diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index e8925ce2d0..c967fed0b9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -1,3 +1,4 @@ + /* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL * @author Martin Odersky @@ -264,7 +265,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT debuglog("alias replacement: " + tree + " ==> " + result); //debug localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true)) } else { - /** + /* * A trait which extends a class and accesses a protected member * of that class cannot implement the necessary accessor method * because its implementation is in an implementation class (e.g. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index a2b0530c26..5dc422bc1a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -126,8 +126,7 @@ trait SyntheticMethods extends ast.TreeDSL { ) } - /** Common code for productElement and (currently disabled) productElementName - */ + /* Common code for productElement and (currently disabled) productElementName */ def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree = createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx))) @@ -135,8 +134,8 @@ trait SyntheticMethods extends ast.TreeDSL { var syntheticCanEqual = false - /** The canEqual method for case classes. - * def canEqual(that: Any) = that.isInstanceOf[This] + /* The canEqual method for case classes. + * def canEqual(that: Any) = that.isInstanceOf[This] */ def canEqualMethod: Tree = { syntheticCanEqual = true @@ -144,13 +143,13 @@ trait SyntheticMethods extends ast.TreeDSL { Ident(m.firstParam) IS_OBJ classExistentialType(clazz)) } - /** that match { case _: this.C => true ; case _ => false } - * where `that` is the given method's first parameter. + /* that match { case _: this.C => true ; case _ => false } + * where `that` is the given method's first parameter. * - * An isInstanceOf test is insufficient because it has weaker - * requirements than a pattern match. Given an inner class Foo and - * two different instantiations of the container, an x.Foo and and a y.Foo - * are both .isInstanceOf[Foo], but the one does not match as the other. + * An isInstanceOf test is insufficient because it has weaker + * requirements than a pattern match. Given an inner class Foo and + * two different instantiations of the container, an x.Foo and and a y.Foo + * are both .isInstanceOf[Foo], but the one does not match as the other. */ def thatTest(eqmeth: Symbol): Tree = { Match( @@ -162,19 +161,19 @@ trait SyntheticMethods extends ast.TreeDSL { ) } - /** (that.asInstanceOf[this.C]) - * where that is the given methods first parameter. + /* (that.asInstanceOf[this.C]) + * where that is the given methods first parameter. */ def thatCast(eqmeth: Symbol): Tree = gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe) - /** The equality method core for case classes and inline clases. - * 1+ args: - * (that.isInstanceOf[this.C]) && { - * val x$1 = that.asInstanceOf[this.C] - * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this) - * } - * Drop canBuildFrom part if class is final and canBuildFrom is synthesized + /* The equality method core for case classes and inline clases. + * 1+ args: + * (that.isInstanceOf[this.C]) && { + * val x$1 = that.asInstanceOf[this.C] + * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this) + * } + * Drop canBuildFrom part if class is final and canBuildFrom is synthesized */ def equalsCore(eqmeth: Symbol, accessors: List[Symbol]) = { val otherName = context.unit.freshTermName(clazz.name + "$") @@ -189,16 +188,16 @@ trait SyntheticMethods extends ast.TreeDSL { ) } - /** The equality method for case classes. 
- * 0 args: - * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this) - * 1+ args: - * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || { - * (that.isInstanceOf[this.C]) && { - * val x$1 = that.asInstanceOf[this.C] - * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this) - * } - * } + /* The equality method for case classes. + * 0 args: + * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this) + * 1+ args: + * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || { + * (that.isInstanceOf[this.C]) && { + * val x$1 = that.asInstanceOf[this.C] + * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this) + * } + * } */ def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m => if (accessors.isEmpty) @@ -208,25 +207,25 @@ trait SyntheticMethods extends ast.TreeDSL { (mkThis ANY_EQ Ident(m.firstParam)) OR equalsCore(m, accessors) } - /** The equality method for value classes - * def equals(that: Any) = (this.asInstanceOf[AnyRef]) eq that.asInstanceOf[AnyRef]) || { - * (that.isInstanceOf[this.C]) && { - * val x$1 = that.asInstanceOf[this.C] - * (this.underlying == that.underlying + /* The equality method for value classes + * def equals(that: Any) = (this.asInstanceOf[AnyRef]) eq that.asInstanceOf[AnyRef]) || { + * (that.isInstanceOf[this.C]) && { + * val x$1 = that.asInstanceOf[this.C] + * (this.underlying == that.underlying */ def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m => equalsCore(m, List(clazz.derivedValueClassUnbox)) } - /** The hashcode method for value classes + /* The hashcode method for value classes * def hashCode(): Int = this.underlying.hashCode */ def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m => Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_) } - /** The _1, _2, etc. methods to implement ProductN, disabled - * until we figure out how to introduce ProductN without cycles. + /* The _1, _2, etc. methods to implement ProductN, disabled + * until we figure out how to introduce ProductN without cycles. */ /**** def productNMethods = { @@ -308,11 +307,11 @@ trait SyntheticMethods extends ast.TreeDSL { // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam))) ) - /** If you serialize a singleton and then deserialize it twice, - * you will have two instances of your singleton unless you implement - * readResolve. Here it is implemented for all objects which have - * no implementation and which are marked serializable (which is true - * for all case objects.) + /* If you serialize a singleton and then deserialize it twice, + * you will have two instances of your singleton unless you implement + * readResolve. Here it is implemented for all objects which have + * no implementation and which are marked serializable (which is true + * for all case objects.) */ def needsReadResolve = ( clazz.isModuleClass @@ -330,8 +329,8 @@ trait SyntheticMethods extends ast.TreeDSL { else Nil ) - /** Always generate overrides for equals and hashCode in value classes, - * so they can appear in universal traits without breaking value semantics. + /* Always generate overrides for equals and hashCode in value classes, + * so they can appear in universal traits without breaking value semantics. 
*/ def impls = { def shouldGenerate(m: Symbol) = { @@ -363,11 +362,11 @@ trait SyntheticMethods extends ast.TreeDSL { catch { case _: TypeError if reporter.hasErrors => Nil } } - /** If this case class has any less than public accessors, - * adds new accessors at the correct locations to preserve ordering. - * Note that this must be done before the other method synthesis - * because synthesized methods need refer to the new symbols. - * Care must also be taken to preserve the case accessor order. + /* If this case class has any less than public accessors, + * adds new accessors at the correct locations to preserve ordering. + * Note that this must be done before the other method synthesis + * because synthesized methods need refer to the new symbols. + * Care must also be taken to preserve the case accessor order. */ def caseTemplateBody(): List[Tree] = { val lb = ListBuffer[Tree]() diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 5c863469e4..b63c8c337b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -221,7 +221,7 @@ abstract class TreeCheckers extends Analyzer { case _: ConstantType => () case _ => checkSym(tree) - /** XXX: lots of syms show up here with accessed == NoSymbol. */ + /* XXX: lots of syms show up here with accessed == NoSymbol. */ if (accessed != NoSymbol) { val agetter = accessed.getter(sym.owner) val asetter = accessed.setter(sym.owner) @@ -248,7 +248,7 @@ abstract class TreeCheckers extends Analyzer { else if (currentOwner.ownerChain takeWhile (_ != sym) exists (_ == NoSymbol)) return fail("tree symbol "+sym+" does not point to enclosing class; tree = ") - /** XXX: temporary while Import nodes are arriving untyped. */ + /* XXX: temporary while Import nodes are arriving untyped. */ case Import(_, _) => return case _ => diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 765916bcdd..b43b4973f3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -905,7 +905,7 @@ trait Typers extends Adaptations with Tags { } } - /** + /* * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T, * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant). * @@ -2156,7 +2156,7 @@ trait Typers extends Adaptations with Tags { unit.error(pos, msg) false } - /** Have to examine all parameters in all lists. + /* Have to examine all parameters in all lists. */ def paramssTypes(tp: Type): List[List[Type]] = tp match { case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe) @@ -2175,10 +2175,10 @@ trait Typers extends Adaptations with Tags { val sym = paramType.typeSymbol def paramPos = nthParamPos(listIdx, paramIdx) - /** Not enough to look for abstract types; have to recursively check the bounds - * of each abstract type for more abstract types. Almost certainly there are other - * exploitable type soundness bugs which can be seen by bounding a type parameter - * by an abstract type which itself is bounded by an abstract type. + /* Not enough to look for abstract types; have to recursively check the bounds + * of each abstract type for more abstract types. 
Almost certainly there are other + * exploitable type soundness bugs which can be seen by bounding a type parameter + * by an abstract type which itself is bounded by an abstract type. */ def checkAbstract(tp0: Type, what: String): Boolean = { def check(sym: Symbol): Boolean = !sym.isAbstractType || { @@ -2864,8 +2864,8 @@ trait Typers extends Adaptations with Tags { } } - /** 'accessor' and 'accessed' are so similar it becomes very difficult to - * follow the logic, so I renamed one to something distinct. + /* 'accessor' and 'accessed' are so similar it becomes very difficult to + * follow the logic, so I renamed one to something distinct. */ def accesses(looker: Symbol, accessed: Symbol) = accessed.hasLocalFlag && ( (accessed.isParamAccessor) @@ -3122,9 +3122,9 @@ trait Typers extends Adaptations with Tags { val argslen = args.length val formals = formalTypes(paramTypes, argslen) - /** Try packing all arguments into a Tuple and apply `fun` - * to that. This is the last thing which is tried (after - * default arguments) + /* Try packing all arguments into a Tuple and apply `fun` + * to that. This is the last thing which is tried (after + * default arguments) */ def tryTupleApply: Option[Tree] = ( if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) { @@ -3145,10 +3145,10 @@ trait Typers extends Adaptations with Tags { else None ) - /** Treats an application which uses named or default arguments. - * Also works if names + a vararg used: when names are used, the vararg - * parameter has to be specified exactly once. Note that combining varargs - * and defaults is ruled out by typedDefDef. + /* Treats an application which uses named or default arguments. + * Also works if names + a vararg used: when names are used, the vararg + * parameter has to be specified exactly once. Note that combining varargs + * and defaults is ruled out by typedDefDef. */ def tryNamesDefaults: Tree = { val lencmp = compareLengths(args, formals) @@ -3258,7 +3258,7 @@ trait Typers extends Adaptations with Tags { case _ => tp } - /** + /* * This is translating uses of List() into Nil. This is less * than ideal from a consistency standpoint, but it shouldn't be * altered without due caution. @@ -3475,8 +3475,8 @@ trait Typers extends Adaptations with Tags { ErroneousAnnotation } - /** Calling constfold right here is necessary because some trees (negated - * floats and literals in particular) are not yet folded. + /* Calling constfold right here is necessary because some trees (negated + * floats and literals in particular) are not yet folded. */ def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = { // The typed tree may be relevantly different than the tree `tr`, @@ -3498,8 +3498,8 @@ trait Typers extends Adaptations with Tags { Some(LiteralAnnotArg(const)) } - /** Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails, - * an error message is reported and None is returned. + /* Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails, + * an error message is reported and None is returned. 
*/ def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match { case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) => @@ -3998,13 +3998,13 @@ trait Typers extends Adaptations with Tags { def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection - /** Note that the trees which arrive here are potentially some distance from - * the trees of direct interest. `cxTree` is some enclosing expression which - * may apparently be arbitrarily larger than `tree`; and `tree` itself is - * too small, having at least in some cases lost its explicit type parameters. - * This logic is designed to use `tree` to pinpoint the immediately surrounding - * Apply/TypeApply/Select node, and only then creates the dynamic call. - * See SI-6731 among others. + /* Note that the trees which arrive here are potentially some distance from + * the trees of direct interest. `cxTree` is some enclosing expression which + * may apparently be arbitrarily larger than `tree`; and `tree` itself is + * too small, having at least in some cases lost its explicit type parameters. + * This logic is designed to use `tree` to pinpoint the immediately surrounding + * Apply/TypeApply/Select node, and only then creates the dynamic call. + * See SI-6731 among others. */ def findSelection(t: Tree): Option[(TermName, Tree)] = t match { case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None @@ -4063,7 +4063,7 @@ trait Typers extends Adaptations with Tags { def typedAnnotated(atd: Annotated): Tree = { val ann = atd.annot val arg1 = typed(atd.arg, mode, pt) - /** mode for typing the annotation itself */ + /* mode for typing the annotation itself */ val annotMode = (mode &~ TYPEmode) | EXPRmode def resultingTypeTree(tpe: Type) = { @@ -4339,8 +4339,8 @@ trait Typers extends Adaptations with Tags { else tpt0 } - /** If current tree appears in > - * return `tp with x.type' else return `tp`. + /* If current tree appears in > + * return `tp with x.type' else return `tp`. */ def narrowRhs(tp: Type) = { val sym = context.tree.symbol context.tree match { @@ -4409,8 +4409,8 @@ trait Typers extends Adaptations with Tags { } } - /** Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to - * insert an implicit conversion. + /* Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to + * insert an implicit conversion. */ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null @@ -4646,8 +4646,8 @@ trait Typers extends Adaptations with Tags { if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree } - /** Attribute a selection where `tree` is `qual.name`. - * `qual` is already attributed. + /* Attribute a selection where `tree` is `qual.name`. + * `qual` is already attributed. 
*/ def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = { val t = typedSelectInternal(tree, qual, name) @@ -4803,7 +4803,7 @@ trait Typers extends Adaptations with Tags { } } - /** A symbol qualifies if: + /* A symbol qualifies if: * - it exists * - it is not stale (stale symbols are made to disappear here) * - if we are in a pattern constructor, method definitions do not qualify @@ -4815,12 +4815,12 @@ trait Typers extends Adaptations with Tags { && !(inPatternConstructor && sym.isMethod && !sym.isStable) ) - /** Attribute an identifier consisting of a simple name or an outer reference. + /* Attribute an identifier consisting of a simple name or an outer reference. * - * @param tree The tree representing the identifier. - * @param name The name of the identifier. - * Transformations: (1) Prefix class members with this. - * (2) Change imported symbols to selections + * @param tree The tree representing the identifier. + * @param name The name of the identifier. + * Transformations: (1) Prefix class members with this. + * (2) Change imported symbols to selections */ def typedIdent(tree: Tree, name: Name): Tree = { // setting to enable unqualified idents in empty package (used by the repl) @@ -4962,7 +4962,7 @@ trait Typers extends Adaptations with Tags { treeCopy.PackageDef(tree, pid1, stats1) setType NoType } - /** + /* * The typer with the correct context for a method definition. If the method is a default getter for * a constructor default, the resulting typer has a constructor context (fixes SI-5543). */ diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 5f13baa107..aa4128f1a7 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -29,7 +29,7 @@ object ClassPath { private def expandS(pattern: String): List[String] = { val wildSuffix = File.separator + "*" - /** Get all subdirectories, jars, zips out of a directory. */ + /* Get all subdirectories, jars, zips out of a directory. */ def lsDir(dir: Directory, filt: String => Boolean = _ => true) = dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index f91e94471a..76b1394b85 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -163,7 +163,7 @@ object ShowPickled extends Names { out.print(" %s[%s]".format(toHexString(pflags), flagString)) } - /** Might be info or privateWithin */ + /* Might be info or privateWithin */ val x = buf.readNat() if (buf.readIndex == end) { printFlags(None) @@ -175,9 +175,9 @@ object ShowPickled extends Names { } } - /** Note: the entries which require some semantic analysis to be correctly - * interpreted are for the most part going to tell you the wrong thing. - * It's not so easy to duplicate the logic applied in the UnPickler. + /* Note: the entries which require some semantic analysis to be correctly + * interpreted are for the most part going to tell you the wrong thing. + * It's not so easy to duplicate the logic applied in the UnPickler. 
*/ def printEntry(i: Int) { buf.readIndex = index(i) diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index 323e894b51..ae95a1bdac 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -400,9 +400,9 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with try { val Some((a, b)) = cpsR - /** Since shiftUnit is bounded [A,B,C>:B] this may not typecheck - * if C is overly specific. So if !(B <:< C), call shiftUnit0 - * instead, which takes only two type arguments. + /* Since shiftUnit is bounded [A,B,C>:B] this may not typecheck + * if C is overly specific. So if !(B <:< C), call shiftUnit0 + * instead, which takes only two type arguments. */ val conforms = a <:< b val call = localTyper.typedPos(tree.pos)( diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 43db7c55e0..c85a4fb6e7 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -555,7 +555,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { val self = buffered - /** + /* * Giving a name to following iterator (as opposed to trailing) because * anonymous class is represented as a structural type that trailing * iterator is referring (the finish() method) and thus triggering diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index a83a6fe6a1..c02ea98914 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -210,13 +210,13 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ if (!hasNext) Iterator.empty.next() - /** Calculate this result. */ + /* Calculate this result. */ val buf = self.newBuilder for(k <- 0 until nums.length; j <- 0 until nums(k)) buf += elms(offs(k)+j) val res = buf.result() - /** Prepare for the next call to next. */ + /* Prepare for the next call to next. */ var idx = nums.length - 1 while (idx >= 0 && nums(idx) == cnts(idx)) idx -= 1 diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 195aeed281..5842ff30e6 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -220,10 +220,10 @@ object NumericRange { if (!isInclusive && zero == remainder) 0 else 1 ) - /** The edge cases keep coming. Since e.g. - * Long.MaxValue + 1 == Long.MinValue - * we do some more improbable seeming checks lest - * overflow turn up as an empty range. + /* The edge cases keep coming. Since e.g. + * Long.MaxValue + 1 == Long.MinValue + * we do some more improbable seeming checks lest + * overflow turn up as an empty range. */ // The second condition contradicts an empty result. 
val isOverflow = longCount == 0 && num.lt(num.plus(start, step), end) == upward diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 9ab7bcc572..6d25ffe19e 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -41,9 +41,9 @@ class SyncVar[A] { * @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise */ def get(timeout: Long): Option[A] = synchronized { - /** Defending against the system clock going backward - * by counting time elapsed directly. Loop required - * to deal with spurious wakeups. + /* Defending against the system clock going backward + * by counting time elapsed directly. Loop required + * to deal with spurious wakeups. */ var rest = timeout while (!isDefined && rest > 0) { diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index d289414c26..9c7bb60475 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -108,7 +108,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val ilen = inpStack.length //Console.println(" ilen = "+ilen+ " extIndex = "+extIndex); if ((ilen != extIndex) && (ilen > 0)) { - /** for external source, inpStack == Nil ! need notify of eof! */ + /* for external source, inpStack == Nil ! need notify of eof! */ pop() } else { reachedEof = true @@ -880,7 +880,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests externalID() } else if (ch == 'P') { - /** PublicID (without system, only used in NOTATION) */ + /* PublicID (without system, only used in NOTATION) */ nextch() xToken("UBLIC") xSpace() diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index 6a24926b14..fd4d52f603 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -76,7 +76,7 @@ class ConsoleRunner extends DirectRunner { val parsed = CommandLineParser(argstr) withUnaryArgs unaryArgs withBinaryArgs binaryArgs val args = onlyValidTestPaths(parsed.residualArgs) - /** Early return on no args, version, or invalid args */ + /* Early return on no args, version, or invalid args */ if (argstr == "") return NestUI.usage() if (parsed isSet "--version") return printVersion if (parsed isSet "--help") return NestUI.usage() diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index 2ab3caa19d..78f7438429 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -350,7 +350,7 @@ object ClassfileConstants { } private def translateFlags(jflags: Int, baseFlags: Long): Long = { var res: Long = JAVA | baseFlags - /** fast, elegant, maintainable, pick any two... */ + /* fast, elegant, maintainable, pick any two... 
*/ res |= translateFlag(jflags & JAVA_ACC_PRIVATE) res |= translateFlag(jflags & JAVA_ACC_PROTECTED) res |= translateFlag(jflags & JAVA_ACC_FINAL) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index d9f1d90b62..63178d0b39 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -197,8 +197,8 @@ trait Mirrors extends api.Mirrors { /************************ helpers ************************/ def erasureName[T: ClassTag] : String = { - /** We'd like the String representation to be a valid - * scala type, so we have to decode the jvm's secret language. + /* We'd like the String representation to be a valid + * scala type, so we have to decode the jvm's secret language. */ def erasureString(clazz: Class[_]): String = { if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]" diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a678edbe01..d7ff4faa5d 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2825,10 +2825,10 @@ trait Types * See SI-5359. */ val bounds = tparam.info.bounds - /** We can seed the type constraint with the type parameter - * bounds as long as the types are concrete. This should lower - * the complexity of the search even if it doesn't improve - * any results. + /* We can seed the type constraint with the type parameter + * bounds as long as the types are concrete. This should lower + * the complexity of the search even if it doesn't improve + * any results. */ if (propagateParameterBoundsToTypeVars) { val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType) @@ -3533,7 +3533,7 @@ trait Types if (args.isEmpty) return tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??)) - /** Disabled - causes cycles in tcpoly tests. */ + /* Disabled - causes cycles in tcpoly tests. 
*/ if (false && isDefinitionsInitialized) { assert(isUseableAsTypeArgs(args), { val tapp_s = s"""$tycon[${args mkString ", "}]""" @@ -4596,7 +4596,7 @@ object TypesStats { val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) val typeOpsStack = Statistics.newTimerStack() - /** Commented out, because right now this does not inline, so creates a closure which will distort statistics + /* Commented out, because right now this does not inline, so creates a closure which will distort statistics @inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = { val start = Statistics.pushTimer(typeOpsStack, c) try op diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 3850f965b0..c940d863f7 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -516,18 +516,18 @@ abstract class UnPickler { var mods: Modifiers = null var name: Name = null - /** Read a Symbol, Modifiers, and a Name */ + /* Read a Symbol, Modifiers, and a Name */ def setSymModsName() { symbol = readSymbolRef() mods = readModifiersRef() name = readNameRef() } - /** Read a Symbol and a Name */ + /* Read a Symbol and a Name */ def setSymName() { symbol = readSymbolRef() name = readNameRef() } - /** Read a Symbol */ + /* Read a Symbol */ def setSym() { symbol = readSymbolRef() } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index d36aa0c927..e9d3ffbf56 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -396,11 +396,11 @@ trait TypeComparers { if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2 if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth) - /** First try, on the right: - * - unwrap Annotated types, BoundedWildcardTypes, - * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard - * - handle common cases for first-kind TypeRefs on both sides as a fast path. - */ + /* First try, on the right: + * - unwrap Annotated types, BoundedWildcardTypes, + * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard + * - handle common cases for first-kind TypeRefs on both sides as a fast path. + */ def firstTry = tp2 match { // fast path: two typerefs, none of them HK case tr2: TypeRef => @@ -445,11 +445,11 @@ trait TypeComparers { secondTry } - /** Second try, on the left: - * - unwrap AnnotatedTypes, BoundedWildcardTypes, - * - bind typevars, - * - handle existential types by skolemization. - */ + /* Second try, on the left: + * - unwrap AnnotatedTypes, BoundedWildcardTypes, + * - bind typevars, + * - handle existential types by skolemization. + */ def secondTry = tp1 match { case AnnotatedType(_, _, _) => isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && @@ -487,11 +487,11 @@ trait TypeComparers { } } - /** Third try, on the right: - * - decompose refined types. - * - handle typerefs and existentials. - * - handle left+right method types, polytypes, typebounds - */ + /* Third try, on the right: + * - decompose refined types. + * - handle typerefs and existentials. 
+ * - handle left+right method types, polytypes, typebounds + */ def thirdTry = tp2 match { case tr2: TypeRef => thirdTryRef(tp1, tr2) @@ -532,9 +532,9 @@ trait TypeComparers { fourthTry } - /** Fourth try, on the left: - * - handle typerefs, refined types, and singleton types. - */ + /* Fourth try, on the left: + * - handle typerefs, refined types, and singleton types. + */ def fourthTry = { def retry(lhs: Type, rhs: Type) = isSubType(lhs, rhs, depth) def abstractTypeOnLeft(hi: Type) = isDifferentTypeConstructor(tp1, hi) && retry(hi, tp2) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index d225f2f087..0f9db31ec1 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -1033,7 +1033,7 @@ private[internal] trait TypeMaps { devWarning(s"$pre.$sym no longer exist at phase $phase") throw new MissingTypeControl // For build manager and presentation compiler purposes } - /** The two symbols have the same fully qualified name */ + /* The two symbols have the same fully qualified name */ def corresponds(sym1: Symbol, sym2: Symbol): Boolean = sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner)) if (!corresponds(sym.owner, rebind0.owner)) { diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index c5c28ad3e9..2e38caaf5d 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -60,7 +60,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni /** The API of a mirror for a reflective universe */ class JavaMirror(owner: Symbol, - /** Class loader that is a mastermind behind the reflexive mirror */ + /* Class loader that is a mastermind behind the reflexive mirror */ val classLoader: ClassLoader ) extends Roots(owner) with super.JavaMirror { thisMirror => -- cgit v1.2.3 From 6ec6f69be2863056c1f10c56406e5a72f2e184cb Mon Sep 17 00:00:00 2001 From: Raphael Jolly Date: Fri, 22 Mar 2013 20:17:14 +0100 Subject: Bypass determination of protection domain when resource is not in a jar --- .../scala/tools/nsc/util/AbstractFileClassLoader.scala | 10 ++++++---- src/reflect/scala/reflect/io/ZipArchive.scala | 6 +++++- 2 files changed, 11 insertions(+), 5 deletions(-) (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala index e4f879560c..b204c39e9c 100644 --- a/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala @@ -8,7 +8,6 @@ package util import scala.tools.nsc.io.AbstractFile import java.security.cert.Certificate import java.security.{ ProtectionDomain, CodeSource } -import util.ScalaClassLoader import java.net.{ URL, URLConnection, URLStreamHandler } import scala.collection.{ mutable, immutable } @@ -91,10 +90,13 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) lazy val protectionDomain = { val cl = Thread.currentThread().getContextClassLoader() val resource = cl.getResource("scala/runtime/package.class") - if (resource == null) null else { + if (resource == null || resource.getProtocol != "jar") null else { val s = resource.getPath - val path = s.substring(0, s.lastIndexOf('!')) - new ProtectionDomain(new CodeSource(new URL(path), 
null.asInstanceOf[Array[Certificate]]), null, this, null) + val n = s.lastIndexOf('!') + if (n < 0) null else { + val path = s.substring(0, n) + new ProtectionDomain(new CodeSource(new URL(path), null.asInstanceOf[Array[Certificate]]), null, this, null) + } } } diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 1342fde3c5..11d04538e9 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -259,7 +259,11 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { } def name = path - def path: String = url.getPath() match { case s => s.substring(0, s.lastIndexOf('!')) } + def path: String = { + val s = url.getPath + val n = s.lastIndexOf('!') + s.substring(0, n) + } def input = url.openStream() def lastModified = try url.openConnection().getLastModified() -- cgit v1.2.3 From cc485a9c4f5764753a7d2d64815c2de84268d5ec Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 15 Mar 2013 08:54:58 -0700 Subject: SI-5717 error when bytecode cannot be written If there's an existing file foo when emitting a class file for foo.Bar, then emit an error at foo.Bar, similar to what javac does. The error message looks something like: foo.scala:4: error: error writing object Foo: ./mypkg/Foo.class: ./mypkg is not a directory --- .../tools/nsc/backend/jvm/BytecodeWriters.scala | 25 +++++++++------ .../scala/tools/nsc/backend/jvm/GenASM.scala | 36 ++++++++++------------ test/files/run/t5717.scala | 21 +++++++++++++ 3 files changed, 54 insertions(+), 28 deletions(-) create mode 100755 test/files/run/t5717.scala (limited to 'src/compiler') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index c1cd3204e0..66aed14d1c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -6,12 +6,15 @@ package scala.tools.nsc package backend.jvm -import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile } +import java.io.{ DataOutputStream, FileOutputStream, IOException, OutputStream, File => JFile } import scala.tools.nsc.io._ import scala.tools.nsc.util.ScalaClassLoader import java.util.jar.Attributes.Name import scala.language.postfixOps +/** Can't output a file due to the state of the file system. */ +class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) + /** For the last mile: turning generated bytecode in memory into * something you can use. Has implementations for writing to class * files, jars, and disassembled/javap output. 
@@ -20,16 +23,20 @@ trait BytecodeWriters { val global: Global import global._ - private def outputDirectory(sym: Symbol): AbstractFile = ( - settings.outputDirs.outputDirFor(enteringFlatten(sym.sourceFile)) - ) - private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = { + private def outputDirectory(sym: Symbol): AbstractFile = + settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) + + /** + * @param clsName cls.getName + */ + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) var dir = base val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) { - dir = dir.subdirectoryNamed(part) - } - dir.fileNamed(pathParts.last + suffix) + for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part + ensureDirectory(dir) fileNamed pathParts.last + suffix } private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = getFile(outputDirectory(sym), clsName, suffix) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 4a3d1805d9..2e29633169 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -105,29 +105,30 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { "Such classes will overwrite one another on case-insensitive filesystems.") } - debuglog("Created new bytecode generator for " + classes.size + " classes.") + debuglog(s"Created new bytecode generator for ${classes.size} classes.") val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint) val plainCodeGen = new JPlainBuilder(bytecodeWriter) val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter) val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter) - while(!sortedClasses.isEmpty) { - val c = sortedClasses.head - + def emitFor(c: IClass) { if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) { - if (c.symbol.companionClass == NoSymbol) { - mirrorCodeGen.genMirrorClass(c.symbol, c.cunit) - } else { - log("No mirror class for module with linked class: " + c.symbol.fullName) - } + if (c.symbol.companionClass == NoSymbol) + mirrorCodeGen genMirrorClass (c.symbol, c.cunit) + else + log(s"No mirror class for module with linked class: ${c.symbol.fullName}") } + plainCodeGen genClass c + if (c.symbol hasAnnotation BeanInfoAttr) beanInfoCodeGen genBeanInfoClass c + } - plainCodeGen.genClass(c) - - if (c.symbol hasAnnotation BeanInfoAttr) { - beanInfoCodeGen.genBeanInfoClass(c) + while (!sortedClasses.isEmpty) { + val c = sortedClasses.head + try emitFor(c) + catch { + case e: FileConflictException => + c.cunit.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}") } - sortedClasses = sortedClasses.tail classes -= c.symbol // GC opportunity } @@ -454,7 +455,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } // ----------------------------------------------------------------------------------------- - // utitilies useful when emitting plain, mirror, and beaninfo classes. + // utilities useful when emitting plain, mirror, and beaninfo classes. 
// ----------------------------------------------------------------------------------------- def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) { @@ -1397,7 +1398,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { addInnerClasses(clasz.symbol, jclass) jclass.visitEnd() writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol) - } /** @@ -2903,7 +2903,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { JAVA_LANG_OBJECT.getInternalName, EMPTY_STRING_ARRAY) - log("Dumping mirror class for '%s'".format(mirrorName)) + log(s"Dumping mirror class for '$mirrorName'") // typestate: entering mode with valid call sequences: // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )* @@ -2926,8 +2926,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { mirrorClass.visitEnd() writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym) } - - } // end of class JMirrorBuilder diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala new file mode 100755 index 0000000000..a0997f5a49 --- /dev/null +++ b/test/files/run/t5717.scala @@ -0,0 +1,21 @@ +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def code = ??? + + def compileCode(code: String) = { + val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + } + // TODO + // Don't assume output is on physical disk + // Let the compiler tell us output dir + // val sc = newCompiler("-cp", classpath, "-d", testOutput.path) + // val out = sc.settings.outputDirs.getSingleOutput.get + def show(): Unit = { + // Don't crash when we find a file 'a' where package 'a' should go. + scala.reflect.io.File(testOutput.path + "/a").writeAll("a") + compileCode("package a { class B }") + } +} -- cgit v1.2.3