author    Vlad Ureche <vlad.ureche@gmail.com>    2013-02-03 23:47:44 +0100
committer Vlad Ureche <vlad.ureche@gmail.com>    2013-02-05 15:52:52 +0100
commit    e5c0e59373897d9f9dc6c49f8458666e88419586 (patch)
tree      7e78e237d3db3c33e2feff4d811084aedfb9dd9b
parent    3d318be51f8e8cdec314565920327486212f5020 (diff)
download  scala-e5c0e59373897d9f9dc6c49f8458666e88419586.tar.gz
          scala-e5c0e59373897d9f9dc6c49f8458666e88419586.tar.bz2
          scala-e5c0e59373897d9f9dc6c49f8458666e88419586.zip
SI-7060 More conservative dead code elim marking
In dead code elimination, a DROP instruction that gets marked as useful and
can be reached via several paths needs to also mark all the reaching
definitions as useful, else we'll get unbalanced stacks on the two paths.

A simplistic example:

```
BB1:
  CALL X        // useful, leaves a LONG on the stack
  JUMP BB3
BB2:
  LOAD_FIELD Y  // not useful
  JUMP BB3
BB3:
  DROP LONG     // useful because "CALL X" is useful
                // but unless we mark "LOAD_FIELD Y" as useful too
                // we'll get unbalanced stacks when reaching BB3
```

This patch addresses the unbalanced stack problem by adding all the
reaching definitions of a useful DROP as useful instructions too.
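For readers who want the rule in runnable form, here is a minimal, hypothetical sketch of such a worklist-based mark pass over the three blocks above. Everything in it (`MarkSketch`, `Call`, `LoadField`, `Drop`, the hard-coded `reachingDefs` map) is invented for illustration and is not the compiler's ICode or `DeadCodeElimination` API; it only shows why a useful DROP must pull its reaching definitions into the useful set:

```scala
// Toy model only: three blocks as in the commit message, with BB3's DROP
// reached from both BB1 (CALL X) and BB2 (LOAD_FIELD Y).
object MarkSketch {
  sealed trait Instr
  case class Call(name: String)   extends Instr // side-effecting: inherently useful
  case class LoadField(f: String) extends Instr // pure: useful only if its value is consumed
  case object Drop                extends Instr

  type Loc = (String, Int) // (basic block, instruction index)

  val blocks: Map[String, List[Instr]] = Map(
    "BB1" -> List(Call("X")),
    "BB2" -> List(LoadField("Y")),
    "BB3" -> List(Drop))

  // definitions of the stack value that reach the DROP, one per incoming path
  val reachingDefs: Map[Loc, List[Loc]] =
    Map(("BB3", 0) -> List(("BB1", 0), ("BB2", 0)))

  def mark(): Set[Loc] = {
    val useful   = scala.collection.mutable.Set.empty[Loc]
    // seed: the side-effecting call and the DROP it forces are useful
    val worklist = scala.collection.mutable.Queue[Loc](("BB1", 0), ("BB3", 0))
    while (worklist.nonEmpty) {
      val loc = worklist.dequeue()
      if (useful.add(loc) && blocks(loc._1)(loc._2) == Drop)
        // the SI-7060 rule: a useful DROP pulls in *all* its reaching
        // definitions, otherwise only one path would push the dropped value
        worklist ++= reachingDefs.getOrElse(loc, Nil).filterNot(useful)
    }
    useful.toSet
  }

  def main(args: Array[String]): Unit =
    println(mark()) // marks BB1's CALL, BB3's DROP, and BB2's LOAD_FIELD
}
```

Running it marks all three locations; removing the `worklist ++= reachingDefs...` line would leave `LOAD_FIELD Y` unmarked, which is exactly the unbalanced-stack situation the commit message describes.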
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 98
-rw-r--r--  test/files/pos/SI-7060.flags                                       |  1
-rw-r--r--  test/files/pos/SI-7060.scala                                       | 11
3 files changed, 79 insertions, 31 deletions
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index d4a6d18c60..1beed3f420 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -20,7 +20,7 @@ abstract class DeadCodeElimination extends SubComponent {
/** The block and index where an instruction is located */
type InstrLoc = (BasicBlock, Int)
-
+
val phaseName = "dce"
/** Create a new phase */
@@ -68,10 +68,10 @@ abstract class DeadCodeElimination extends SubComponent {
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
-
+
/** Map from a local and a basic block to the instructions that store to that local in that basic block */
val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
-
+
/** Stores that clobber previous stores to array or ref locals. See SI-5313 */
val clobbers = mutable.Set[InstrLoc]()
@@ -91,7 +91,7 @@ abstract class DeadCodeElimination extends SubComponent {
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
collectRDef(m)
- mark
+ mark()
sweep(m)
accessedLocals = accessedLocals.distinct
val diff = m.locals diff accessedLocals
@@ -113,10 +113,25 @@ abstract class DeadCodeElimination extends SubComponent {
useful(bb) = new mutable.BitSet(bb.size)
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
+
+ // utility for adding to worklist
+ def moveToWorkList() = moveToWorkListIf(true)
+
+ // utility for (conditionally) adding to worklist
+ def moveToWorkListIf(cond: Boolean) =
+ if (cond) {
+ debuglog("in worklist: " + i)
+ worklist += ((bb, idx))
+ } else {
+ debuglog("not in worklist: " + i)
+ }
+
+ // instruction-specific logic
i match {
case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
+ moveToWorkListIf(false)
case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
@@ -135,7 +150,8 @@ abstract class DeadCodeElimination extends SubComponent {
case _ => false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
+
// add it to the localStores map
val key = (l, bb)
val set = localStores(key)
@@ -144,10 +160,15 @@ abstract class DeadCodeElimination extends SubComponent {
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); debuglog("marking " + m1)
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+ moveToWorkList()
+
+ case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
+ moveToWorkList()
+
case CALL_METHOD(m1, SuperCall(_)) =>
- worklist += ((bb, idx)) // super calls to constructor
+ moveToWorkList() // super calls to constructor
+
case DROP(_) =>
val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
val (bb1, idx1) = p
@@ -156,12 +177,13 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
case _ =>
dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
-// println("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
+ debuglog("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
case _ => ()
+ moveToWorkListIf(false)
}
rd = rdef.interpret(bb, idx, rd)
}
@@ -183,19 +205,30 @@ abstract class DeadCodeElimination extends SubComponent {
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
- // adds the instructions that define the stack values about to be consumed to the work list to
+ // adds the instructions that define the stack values about to be consumed to the work list to
// be marked useful
def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
debuglog(s"\t${bb1(idx1)} is consumed by $instr")
worklist += ((bb1, idx1))
}
+ // DROP logic -- if an instruction is useful, its drops are also useful
+ // and we don't mark the DROPs as useful directly but add them to the
+ // worklist so we also mark their reaching defs as useful - see SI-7060
if (!useful(bb)(idx)) {
useful(bb) += idx
dropOf.get(bb, idx) foreach {
- for ((bb1, idx1) <- _)
- useful(bb1) += idx1
+ for ((bb1, idx1) <- _) {
+ /*
+ * SI-7060: A drop that we now mark as useful can be reached via several paths,
+ * so we should follow by marking all its reaching definitions as useful too:
+ */
+ debuglog("\tAdding: " + bb1(idx1) + " to the worklist, as a useful DROP.")
+ worklist += ((bb1, idx1))
+ }
}
+
+ // per-instruction logic
instr match {
case LOAD_LOCAL(l1) =>
for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
@@ -203,7 +236,7 @@ abstract class DeadCodeElimination extends SubComponent {
worklist += ((bb1, idx1))
}
- case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
addDefs()
// see SI-5313
// search for clobbers of this store if we aren't doing l1 = null
@@ -211,7 +244,7 @@ abstract class DeadCodeElimination extends SubComponent {
// a lot of null stores very cheaply
if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
findClobbers(l1, bb, idx + 1)
-
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -236,15 +269,15 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
}
-
+
/**
* Finds and marks all clobbers of the given local starting in the given
* basic block at the given index
- *
+ *
* Storing to local variables of reference or array type may be indirectly
- * observable because it may remove a reference to an object which may allow the object
- * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a
- * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked
+ * observable because it may remove a reference to an object which may allow the object
+ * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a
+ * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked
* useful then its clobbers must go into the set of clobbers, which will be
* compensated for later
*/
@@ -253,20 +286,20 @@ abstract class DeadCodeElimination extends SubComponent {
val inspected = mutable.Set[BasicBlock]()
// our worklist of blocks that still need to be checked
val blocksToBeInspected = mutable.Set[BasicBlock]()
-
- // Tries to find the next clobber of l1 in bb1 starting at idx1.
+
+ // Tries to find the next clobber of l1 in bb1 starting at idx1.
// if it finds one it adds the clobber to clobbers set for later
- // handling. If not it adds the direct successor blocks to
+ // handling. If not it adds the direct successor blocks to
// the uninspectedBlocks to try to find clobbers there. Either way
// it adds the exception successor blocks for further search
- def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
val key = ((l, bb1))
val foundClobber = (localStores contains key) && {
def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
// find the smallest index greater than or equal to idx1
val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
- if (clobberIdx == -1)
+ if (clobberIdx == -1)
false
else {
debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
@@ -275,7 +308,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- // always need to look into the exception successors for additional clobbers
+ // always need to look into the exception successors for additional clobbers
// because we don't know when flow might enter an exception handler
blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
// If we didn't find a clobber here then we need to look at successor blocks.
@@ -284,7 +317,7 @@ abstract class DeadCodeElimination extends SubComponent {
blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
}
}
-
+
// first search starting at the current index
// note we don't put bb in the inspected list yet because a loop may later force
// us back around to search from the beginning of bb
@@ -301,14 +334,16 @@ abstract class DeadCodeElimination extends SubComponent {
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
+ debuglog("Sweeping: " + m)
+
m foreachBlock { bb =>
-// Console.println("** Sweeping block " + bb + " **")
+ debuglog(bb + ":")
val oldInstr = bb.toList
bb.open
bb.clear
for (Pair(i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
-// log(" " + i + " is useful")
+ debuglog(" * " + i + " is useful")
bb.emit(i, i.pos)
compensations.get(bb, idx) match {
case Some(is) => is foreach bb.emit
@@ -328,13 +363,13 @@ abstract class DeadCodeElimination extends SubComponent {
log(s"Eliminated instantation of $sym inside $m")
case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
// if an unused instruction was a clobber of a used store to a reference or array type
- // then we'll replace it with the store of a null to make sure the reference is
+ // then we'll replace it with the store of a null to make sure the reference is
// eliminated. See SI-5313
bb emit CONSTANT(Constant(null))
bb emit STORE_LOCAL(l)
case _ => ()
}
- debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
+ debuglog(" " + i + " [swept]")
}
}
@@ -355,6 +390,7 @@ abstract class DeadCodeElimination extends SubComponent {
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
val (bb, idx) = d
+ debuglog("rdef: "+ bb(idx))
bb(idx) match {
case DUP(_) if idx > 0 =>
bb(idx - 1) match {
diff --git a/test/files/pos/SI-7060.flags b/test/files/pos/SI-7060.flags
new file mode 100644
index 0000000000..c926ad6493
--- /dev/null
+++ b/test/files/pos/SI-7060.flags
@@ -0,0 +1 @@
+-Yinline -Ydead-code
diff --git a/test/files/pos/SI-7060.scala b/test/files/pos/SI-7060.scala
new file mode 100644
index 0000000000..c87620e020
--- /dev/null
+++ b/test/files/pos/SI-7060.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ @inline final def mbarray_apply_minibox(array: Any, tag: Byte): Long =
+ if (tag == 0) {
+ array.asInstanceOf[Array[Long]](0)
+ } else
+ array.asInstanceOf[Array[Byte]](0).toLong
+
+ def crash_method(): Unit =
+ mbarray_apply_minibox(null, 0)
+}