diff options
Diffstat (limited to 'src')
167 files changed, 3212 insertions, 4919 deletions
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala index 8bb388be8f..7ec3457c6a 100644 --- a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala +++ b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala @@ -60,15 +60,15 @@ trait Reifiers { def logFreeVars(symtab: SymbolTable): Unit = // logging free vars only when they are untyped prevents avalanches of duplicate messages symtab.syms map (sym => symtab.symDef(sym)) foreach { - case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms.value && binding.tpe == null => + case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null => reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin)) - case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes.value && binding.tpe == null => + case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null => reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin)) case _ => // do nothing } - if (universe.settings.logFreeTerms.value || universe.settings.logFreeTypes.value) + if (universe.settings.logFreeTerms || universe.settings.logFreeTypes) reification match { case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab) case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab) diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala index d43532090c..4572caeb36 100644 --- a/src/compiler/scala/reflect/reify/Phases.scala +++ b/src/compiler/scala/reflect/reify/Phases.scala @@ -25,7 +25,7 @@ trait Phases extends Reshape if (reifyDebug) println("[reshape phase]") tree = reshape.transform(tree) if (reifyDebug) println("[interlude]") - if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + 
nodePrinters.nodeToString(tree).trim else tree.toString)) + if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)) if (reifyDebug) println("[calculate phase]") calculate.traverse(tree) diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index 9cf069fe98..11857e2172 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -57,7 +57,7 @@ abstract class Reifier extends States val result = reifee match { case tree: Tree => - reifyTrace("reifying = ")(if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) + reifyTrace("reifying = ")(if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) reifyTrace("reifee is located at: ")(tree.pos) reifyTrace("universe = ")(universe) reifyTrace("mirror = ")(mirror) @@ -140,4 +140,4 @@ abstract class Reifier extends States throw new UnexpectedReificationException(defaultErrorPosition, "reification crashed", ex) } } -}
\ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala index d57188bf6e..7338df1f72 100644 --- a/src/compiler/scala/reflect/reify/utils/Extractors.scala +++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala @@ -164,51 +164,30 @@ trait Extractors { } } - object FreeDef { - def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match { - case FreeTermDef(uref, name, binding, flags, origin) => - Some((uref, name, binding, flags, origin)) - case FreeTypeDef(uref, name, binding, flags, origin) => - Some((uref, name, binding, flags, origin)) - case _ => - None - } - } - - object FreeTermDef { - def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match { - case - ValDef(_, name, _, Apply( - Select(Select(uref1 @ Ident(_), build1), newFreeTerm), - List( - _, - _, - Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))), - Literal(Constant(origin: String))))) - if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm && - uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits => - Some((uref1, name, reifyBinding(tree), flags, origin)) - case _ => - None - } - } - - object FreeTypeDef { - def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match { - case - ValDef(_, name, _, Apply( - Select(Select(uref1 @ Ident(_), build1), newFreeType), - List( - _, - Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))), - Literal(Constant(origin: String))))) - if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType && - uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits => - Some((uref1, name, reifyBinding(tree), flags, origin)) - case _ => - None + sealed abstract 
class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) { + def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = { + def acceptFreeTermFactory(name: Name) = { + (acceptTerms && name == nme.newFreeTerm) || + (acceptTypes && name == nme.newFreeType) + } + tree match { + case + ValDef(_, name, _, Apply( + Select(Select(uref1 @ Ident(_), build1), freeTermFactory), + _ :+ + Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+ + Literal(Constant(origin: String)))) + if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) && + uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits => + Some((uref1, name, reifyBinding(tree), flags, origin)) + case _ => + None + } } } + object FreeDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = true) + object FreeTermDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = false) + object FreeTypeDef extends FreeDefExtractor(acceptTerms = false, acceptTypes = true) object FreeRef { def unapply(tree: Tree): Option[(Tree, TermName)] = tree match { diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala index c756a1b0d9..64f6758e73 100644 --- a/src/compiler/scala/tools/nsc/CompileClient.scala +++ b/src/compiler/scala/tools/nsc/CompileClient.scala @@ -26,12 +26,12 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon { val settings = new FscSettings(Console.println) val command = new OfflineCompilerCommand(args.toList, settings) val shutdown = settings.shutdown.value - val extraVmArgs = if (settings.preferIPv4.value) List("-D%s=true".format(preferIPv4Stack.key)) else Nil + val extraVmArgs = if (settings.preferIPv4) List("-D%s=true".format(preferIPv4Stack.key)) else Nil val vmArgs = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs val fscArgs = args.toList ++ 
command.extraFscArgs - if (settings.version.value) { + if (settings.version) { Console println versionMsg return true } diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala index fc247600f6..fbfed6110f 100644 --- a/src/compiler/scala/tools/nsc/Driver.scala +++ b/src/compiler/scala/tools/nsc/Driver.scala @@ -41,7 +41,7 @@ abstract class Driver { command = new CompilerCommand(args.toList, ss) settings = command.settings - if (settings.version.value) { + if (settings.version) { reporter.echo(versionMsg) } else if (processSettingsHook()) { val compiler = newCompiler() diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c53d6cd11e..d0b59b53cc 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -221,7 +221,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def inform(msg: String) = reporter.echo(msg) override def globalError(msg: String) = reporter.error(NoPosition, msg) override def warning(msg: String) = - if (settings.fatalWarnings.value) globalError(msg) + if (settings.fatalWarnings) globalError(msg) else reporter.warning(NoPosition, msg) // Getting in front of Predef's asserts to supplement with more info. @@ -252,7 +252,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } @inline final def ifDebug(body: => Unit) { - if (settings.debug.value) + if (settings.debug) body } /** This is for WARNINGS which should reach the ears of scala developers @@ -262,7 +262,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) * to make them visually distinct. */ @inline final override def devWarning(msg: => String) { - if (settings.developer.value || settings.debug.value) + if (settings.developer || settings.debug) warning("!!! " + msg) else log("!!! 
" + msg) // such warnings always at least logged @@ -272,7 +272,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) msg + " in " + (currentTime - start) + "ms" def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) - def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]") + def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]") def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) def logError(msg: String, t: Throwable): Unit = () @@ -287,7 +287,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } @inline final override def debuglog(msg: => String) { - if (settings.debug.value) + if (settings.debug) log(msg) } @@ -332,7 +332,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - if (settings.verbose.value || settings.Ylogcp.value) { + if (settings.verbose || settings.Ylogcp) { // Uses the "do not truncate" inform informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]") informComplete("[search path for class files: " + classPath.asClasspathString + "]") @@ -402,7 +402,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source - if (settings.debug.value && (settings.verbose.value || currentRun.size < 5)) + if (settings.debug && (settings.verbose || currentRun.size < 5)) inform("[running phase " + name + " on " + unit + "]") val unit0 = currentUnit @@ -736,7 +736,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val maxName = (0 /: phaseNames)(_ max _.length) val width = maxName min Limit val maxDesc = MaxCol - (width + 6) // descriptions not novels - val fmt = if (settings.verbose.value) s"%${maxName}s %2s %s%n" + val fmt = if (settings.verbose) s"%${maxName}s %2s %s%n" else s"%${width}.${width}s %2s %.${maxDesc}s%n" val line1 = fmt.format("phase 
name", "id", "description") @@ -1097,7 +1097,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val info3: List[String] = ( ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) ) ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) ) - ++ ( if (!settings.debug.value) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) ) + ++ ( if (!settings.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) ) ++ ( List(errorMessage) ) ) @@ -1105,7 +1105,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) ("\n" + info1) :: info2 :: info3 mkString "\n\n" } - catch { case x: Exception => errorMessage } + catch { case _: Exception | _: TypeError => errorMessage } /** The id of the currently active run */ @@ -1113,7 +1113,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def echoPhaseSummary(ph: Phase) = { /* Only output a summary message under debug if we aren't echoing each file. 
*/ - if (settings.debug.value && !(settings.verbose.value || currentRun.size < 5)) + if (settings.debug && !(settings.verbose || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } @@ -1121,10 +1121,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) class ConditionalWarning(what: String, option: Settings#BooleanSetting) { val warnings = mutable.LinkedHashMap[Position, String]() def warn(pos: Position, msg: String) = - if (option.value) reporter.warning(pos, msg) + if (option) reporter.warning(pos, msg) else if (!(warnings contains pos)) warnings += ((pos, msg)) def summarize() = - if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings.value)) + if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings)) warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name)) } @@ -1240,7 +1240,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def resetProjectClasses(root: Symbol): Unit = try { def unlink(sym: Symbol) = if (sym != NoSymbol) root.info.decls.unlink(sym) - if (settings.verbose.value) inform("[reset] recursing in "+root) + if (settings.verbose) inform("[reset] recursing in "+root) val toReload = mutable.Set[String]() for (sym <- root.info.decls) { if (sym.isInitialized && clearOnNextRun(sym)) @@ -1260,7 +1260,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) for (fullname <- toReload) classPath.findClass(fullname) match { case Some(classRep) => - if (settings.verbose.value) inform("[reset] reinit "+fullname) + if (settings.verbose) inform("[reset] reinit "+fullname) loaders.initializeFromClassPath(root, classRep) case _ => } @@ -1453,7 +1453,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } def reportCompileErrors() { - if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings.value) + if (!reporter.hasErrors && 
reporter.hasWarnings && settings.fatalWarnings) globalError("No warnings can be incurred under -Xfatal-warnings.") if (reporter.hasErrors) { @@ -1492,7 +1492,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def compileUnits(units: List[CompilationUnit], fromPhase: Phase) { try compileUnitsInternal(units, fromPhase) catch { case ex: Throwable => - val shown = if (settings.verbose.value) + val shown = if (settings.verbose) stackTraceString(ex) else stackTraceHeadString(ex) // note that error stacktraces do not print in fsc @@ -1526,14 +1526,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (shouldWriteIcode) { // Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given. writeICode() - } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate.value && runIsAt(cleanupPhase)) { + } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) { // print trees - if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) nodePrinters.printAll() + if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll() else printAllUnits() } // print the symbols presently attached to AST nodes - if (settings.Yshowsyms.value) + if (settings.Yshowsyms) trackerFactory.snapshot() // print members @@ -1552,7 +1552,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) runCheckers() // output collected statistics - if (settings.Ystatistics.value) + if (settings.Ystatistics) statistics.print(phase) advancePhase() @@ -1697,7 +1697,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) informProgress("wrote " + file) } catch { case ex: IOException => - if (settings.debug.value) ex.printStackTrace() + if (settings.debug) ex.printStackTrace() globalError("could not write file " + file) } }) diff --git 
a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index 00c6c37dfd..9f6f483ad8 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -17,7 +17,7 @@ class MainClass extends Driver with EvalLoop { override def newCompiler(): Global = Global(settings, reporter) override def doCompile(compiler: Global) { - if (settings.resident.value) resident(compiler) + if (settings.resident) resident(compiler) else super.doCompile(compiler) } } diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index 584805b37e..e2893204e0 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -42,7 +42,7 @@ object MainTokenMetric { tokenMetric(compiler, command.files) } catch { case ex @ FatalError(msg) => - if (command.settings.debug.value) + if (command.settings.debug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 821e88e52e..5f8bc71449 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -95,7 +95,7 @@ class ScriptRunner extends HasCompileSocket { settings.outdir.value = compiledPath.path - if (settings.nc.value) { + if (settings.nc) { /* Setting settings.script.value informs the compiler this is not a * self contained compilation unit. */ @@ -114,7 +114,7 @@ class ScriptRunner extends HasCompileSocket { * not take place until there are no non-daemon threads running. Tickets #1955, #2006. 
*/ util.waitingForThreads { - if (settings.save.value) { + if (settings.save) { val jarFile = jarFileFor(scriptFile) def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile)) diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala index 602366a201..caab299635 100644 --- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala +++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala @@ -32,7 +32,7 @@ abstract class NodePrinters { } trait DefaultPrintAST extends PrintAST { - val printPos = settings.Xprintpos.value || settings.Yposdebug.value + val printPos = settings.Xprintpos || settings.Yposdebug def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name) def showDefTreeName(tree: DefTree) = showName(tree.name) @@ -100,9 +100,9 @@ abstract class NodePrinters { def stringify(tree: Tree): String = { buf.clear() - if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL) - if (settings.XshowtreesCompact.value) { - buf.append(showRaw(tree, printIds = settings.uniqid.value, printTypes = settings.printtypes.value)) + if (settings.XshowtreesStringified) buf.append(tree.toString + EOL) + if (settings.XshowtreesCompact) { + buf.append(showRaw(tree, printIds = settings.uniqid, printTypes = settings.printtypes)) } else { level = 0 traverse(tree) diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index 66d75969e9..beab801edf 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -25,6 +25,6 @@ trait Positions extends scala.reflect.internal.Positions { } override protected[this] lazy val posAssigner: PosAssigner = - if (settings.Yrangepos.value && settings.debug.value || settings.Yposdebug.value) new ValidatingPosAssigner + if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner else new DefaultPosAssigner } diff 
--git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index 7fefb2ce0c..72538bac08 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -200,15 +200,15 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => override def printTree(tree: Tree) { print(safe(tree)) } } - def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value) - def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value) + def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymkinds) + def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymkinds) def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true) def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer) override def newTreePrinter(writer: PrintWriter): TreePrinter = - if (settings.Ycompacttrees.value) newCompactTreePrinter(writer) + if (settings.Ycompacttrees) newCompactTreePrinter(writer) else newStandardTreePrinter(writer) override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream)) override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter)) diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 6a0f4407fc..0731d78a9b 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -87,12 +87,4 @@ abstract class TreeInfo extends 
scala.reflect.internal.TreeInfo { case DocDef(_, definition) => isPureDef(definition) case _ => super.isPureDef(tree) } - - /** Does list of trees start with a definition of - * a class of module with given name (ignoring imports) - */ - override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match { - case ClassDef(_, `name`, _, _) :: Nil => true - case _ => super.firstDefinesClassOrObject(trees, name) - } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 68e198d071..7671912651 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2085,7 +2085,7 @@ self => val name = ident() var bynamemod = 0 val tpt = - if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) { + if (settings.YmethodInfer && !owner.isTypeName && in.token != COLON) { TypeTree() } else { // XX-METHOD-INFER accept(COLON) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index b485e862fd..eb31f7a66e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -446,7 +446,7 @@ trait Scanners extends ScannersCommon { * there a realistic situation where one would need it? */ if (isDigit(ch)) { - if (settings.future.value) syntaxError("Non-zero numbers may not have a leading zero.") + if (settings.future) syntaxError("Non-zero numbers may not have a leading zero.") else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.") } base = 8 @@ -990,7 +990,7 @@ trait Scanners extends ScannersCommon { /* As of scala 2.11, it isn't a number unless c here is a digit, so * settings.future.value excludes the rest of the logic. 
*/ - if (settings.future.value && !isDigit(c)) + if (settings.future && !isDigit(c)) return setStrVal() val isDefinitelyNumber = (c: @switch) match { @@ -1217,7 +1217,7 @@ trait Scanners extends ScannersCommon { */ class SourceFileScanner(val source: SourceFile) extends Scanner { val buf = source.content - override val decodeUni: Boolean = !settings.nouescape.value + override val decodeUni: Boolean = !settings.nouescape // suppress warnings, throw exception on errors def deprecationWarning(off: Offset, msg: String): Unit = () diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index 7cf5a07291..80d70e6428 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -32,7 +32,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse else newUnitParser(unit).smartParse() } - if (settings.Yrangepos.value && !reporter.hasErrors) + if (settings.Yrangepos && !reporter.hasErrors) validatePositions(unit.body) } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 89682e91d2..94fbba8066 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -331,7 +331,7 @@ trait BasicBlocks { assert(!closed || ignore, this) if (ignore) { - if (settings.debug.value) { + if (settings.debug) { /* Trying to pin down what it's likely to see after a block has been * put into ignore mode so we hear about it if there's a problem. */ @@ -405,11 +405,11 @@ trait BasicBlocks { * is discovered to be unreachable. 
*/ def killIf(cond: Boolean) { - if (!settings.YdisableUnreachablePrevention.value && cond) { + if (!settings.YdisableUnreachablePrevention && cond) { debuglog(s"Killing block $this") assert(instructionList.isEmpty, s"Killing a non empty block $this") // only checked under debug because fetching predecessor list is moderately expensive - if (settings.debug.value) + if (settings.debug) assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}") close() diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 8ba75fbb46..31028e64d3 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -39,7 +39,7 @@ abstract class GenICode extends SubComponent { override def newPhase(prev: Phase) = new ICodePhase(prev) @inline private def debugassert(cond: => Boolean, msg: => Any) { - if (settings.debug.value) + if (settings.debug) assert(cond, msg) } @@ -129,7 +129,7 @@ abstract class GenICode extends SubComponent { case Return(_) => () case EmptyTree => globalError("Concrete method has no definition: " + tree + ( - if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")" + if (settings.debug) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")" else "") ) case _ => if (ctx1.bb.isEmpty) @@ -710,7 +710,7 @@ abstract class GenICode extends SubComponent { debuglog("BOX : " + fun.symbol.fullName) val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe)) val nativeKind = toTypeKind(expr.tpe) - if (settings.Xdce.value) { + if (settings.Xdce) { // we store this boxed value to a local, even if not really needed. 
// boxing optimization might use it, and dead code elimination will // take care of unnecessary stores @@ -1483,7 +1483,7 @@ abstract class GenICode extends SubComponent { if (mustUseAnyComparator) { // when -optimise is on we call the @inline-version of equals, found in ScalaRunTime val equalsMethod = - if (!settings.optimise.value) { + if (!settings.optimise) { def default = platform.externalEquals platform match { case x: JavaPlatform => @@ -1507,7 +1507,7 @@ abstract class GenICode extends SubComponent { val ctx2 = genLoad(r, ctx1, ObjectReference) val branchesReachable = !ctx2.bb.ignore ctx2.bb.emitOnly( - CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(onInstance = false)), + CALL_METHOD(equalsMethod, if (settings.optimise) Dynamic else Static(onInstance = false)), CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) ) branchesReachable @@ -2032,7 +2032,7 @@ abstract class GenICode extends SubComponent { // Generate the catch-all exception handler that deals with uncaught exceptions coming // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows // the exception - if (settings.YdisableUnreachablePrevention.value || !outerCtx.bb.ignore) { + if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) { if (finalizer != EmptyTree) { val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers this.addActiveHandler(exh) // .. 
and body aswell diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index b7b07a579f..0cdf629ce1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -94,7 +94,7 @@ abstract class ICodeCheckers { } def checkICodes(): Unit = { - if (settings.verbose.value) + if (settings.verbose) println("[[consistency check at the beginning of phase " + globalPhase.name + "]]") classes.values foreach check } @@ -444,7 +444,7 @@ abstract class ICodeCheckers { def checkBool(cond: Boolean, msg: String) = if (!cond) icodeError(msg) - if (settings.debug.value) { + if (settings.debug) { log("PC: " + instr) log("stack: " + stack) log("================") diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala index e2d387c65d..3f2141782a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala @@ -35,7 +35,7 @@ abstract class ICodes extends AnyRef /** Debugging flag */ def shouldCheckIcode = settings.check contains global.genicode.phaseName - def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug.value) println(msg) + def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug) println(msg) /** The ICode linearizer. 
*/ val linearizer: Linearizer = settings.Xlinearizer.value match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index e471f4256b..c1cda2c863 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -61,7 +61,7 @@ trait Members { startBlock = newBlock() def removeBlock(b: BasicBlock) { - if (settings.debug.value) { + if (settings.debug) { // only do this sanity check when debug is turned on because it's moderately expensive val referers = blocks filter (_.successors contains b) assert(referers.isEmpty, s"Trying to removing block $b (with preds ${b.predecessors.mkString}) but it is still refered to from block(s) ${referers.mkString}") @@ -172,6 +172,7 @@ trait Members { var returnType: TypeKind = _ var recursive: Boolean = false var bytecodeHasEHs = false // set by ICodeReader only, used by Inliner to prevent inlining (SI-6188) + var bytecodeHasInvokeDynamic = false // set by ICodeReader only, used by Inliner to prevent inlining until we have proper invoke dynamic support /** local variables and method parameters */ var locals: List[Local] = Nil diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index d8aac8e9db..ff118be3c4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -394,6 +394,25 @@ trait Opcodes { self: ICodes => override def category = mthdsCat } + + /** + * A place holder entry that allows us to parse class files with invoke dynamic + * instructions. Because the compiler doesn't yet really understand the + * behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze + * this instruction will cause a failure. 
The only optimization that + * should ever look at non-Scala generated icode is the inliner, and it + * has been modified to not examine any method with invokeDynamic + * instructions. So if this poison pill ever causes problems then + * there's been a serious misunderstanding + */ + // TODO do the real thing + case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction { + private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed") + override def consumed = error + override def produced = error + override def producedTypes = error + override def category = error + } case class BOX(boxType: TypeKind) extends Instruction { assert(boxType.isValueType && (boxType ne UNIT)) // documentation diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala index 5b47e3cfff..1fe33f78e7 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala @@ -110,7 +110,7 @@ trait Printers { self: ICodes => print(bb.label) if (bb.loopHeader) print("[loop header]") print(": ") - if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString) + if (settings.debug) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString) indent(); println() bb.toList foreach printInstruction undent(); println() diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala index e92e61c957..10d57df4a3 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala @@ -39,7 +39,7 @@ trait Repository { } catch { case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype log("Failed to load %s. 
[%s]".format(sym.fullName, e.getMessage)) - if (settings.debug.value) { e.printStackTrace } + if (settings.debug) { e.printStackTrace } false } diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 152a11ab1a..338a07c872 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -199,7 +199,7 @@ abstract class CopyPropagation { override def run() { forwardAnalysis(blockTransfer) - if (settings.debug.value) { + if (settings.debug) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(in(b) != lattice.bottom, "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?")) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala index 14b57f287f..60f7857d0c 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala @@ -74,7 +74,7 @@ abstract class Liveness { override def run() { backwardAnalysis(blockTransfer) - if (settings.debug.value) { + if (settings.debug) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(lattice.bottom != in(b), "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?")) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala index 2d29e6b14f..26b7bc50d8 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala @@ -51,7 +51,7 @@ abstract class ReachingDefinitions { // it'd be nice not to call zip with mismatched 
sequences because // it makes it harder to spot the real problems. val result = (a.stack, b.stack).zipped map (_ ++ _) - if (settings.debug.value && (a.stack.length != b.stack.length)) + if (settings.debug && (a.stack.length != b.stack.length)) devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result") result } @@ -141,7 +141,7 @@ abstract class ReachingDefinitions { override def run() { forwardAnalysis(blockTransfer) - if (settings.debug.value) { + if (settings.debug) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(lattice.bottom != in(b), "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 227c1064ea..57380db7e7 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -136,7 +136,7 @@ abstract class TypeFlowAnalysis { // icodes.lubs0 = 0 forwardAnalysis(blockTransfer) timer.stop - if (settings.debug.value) { + if (settings.debug) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(visited.contains(b), "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)) @@ -167,7 +167,7 @@ abstract class TypeFlowAnalysis { val bindings = out.vars val stack = out.stack - if (settings.debug.value) { + if (settings.debug) { // Console.println("[before] Stack: " + stack); // Console.println(i); } @@ -367,7 +367,7 @@ abstract class TypeFlowAnalysis { preCandidates += rc._2.bb } - if (settings.debug.value) { + if (settings.debug) { for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) { assert(visited.contains(b), "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." 
+ visited) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 78fb109b42..66a58870cc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -86,10 +86,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { override def run() { - if (settings.debug.value) + if (settings.debug) inform("[running phase " + name + " on icode]") - if (settings.Xdce.value) + if (settings.Xdce) for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) { log(s"Optimizer eliminated ${sym.fullNameString}") icodes.classes -= sym @@ -804,7 +804,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // without it. This is particularly bad because the availability of // generic information could disappear as a consequence of a seemingly // unrelated change. - settings.Ynogenericsig.value + settings.Ynogenericsig || sym.isArtifact || sym.isLiftedMethod || sym.isBridge @@ -834,7 +834,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { catch { case _: Throwable => false } } - if (settings.Xverify.value) { + if (settings.Xverify) { // Run the signature parser to catch bogus signatures. 
val isValidSignature = wrap { // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser) @@ -1362,7 +1362,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for (constructor <- c.lookupStaticCtor) { addStaticInit(Some(constructor)) } - val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders.value) + val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders) if (!skipStaticForwarders) { val lmoc = c.symbol.companionModule // add static forwarders if there are no name conflicts; see bugs #363 and #1735 @@ -2251,16 +2251,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // info calls so that types are up to date; erasure may add lateINTERFACE to traits hostSymbol.info ; methodOwner.info - def isInterfaceCall(sym: Symbol) = ( - sym.isInterface && methodOwner != ObjectClass + def needsInterfaceCall(sym: Symbol) = ( + sym.isInterface || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass) ) // whether to reference the type of the receiver or - // the type of the method owner (if not an interface!) 
+ // the type of the method owner val useMethodOwner = ( style != Dynamic - || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol) || hostSymbol.isBottomClass + || methodOwner == ObjectClass ) val receiver = if (useMethodOwner) methodOwner else hostSymbol val jowner = javaName(receiver) @@ -2283,11 +2283,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } style match { - case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype) - case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype) - case Dynamic if isInterfaceCall(receiver) => dbg("invokinterface"); jcode.invokeinterface(jowner, jname, jtype) - case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype) - case SuperCall(_) => + case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype) + case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype) + case Dynamic if needsInterfaceCall(receiver) => dbg("invokinterface"); jcode.invokeinterface(jowner, jname, jtype) + case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype) + case SuperCall(_) => dbg("invokespecial") jcode.invokespecial(jowner, jname, jtype) initModule() @@ -3220,7 +3220,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val detour = computeDetour rephraseGotos(detour) - if (settings.debug.value) { + if (settings.debug) { val (remappings, cycles) = detour partition {case (source, target) => source != target} for ((source, target) <- remappings) { debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.") @@ -3273,7 +3273,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } // remove the unusued exception handler references - if (settings.debug.value) + if (settings.debug) for (exh <- unusedExceptionHandlers) debuglog(s"eliding 
exception handler $exh because it does not cover any reachable blocks") m.exh = m.exh filterNot unusedExceptionHandlers @@ -3287,7 +3287,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def normalize(m: IMethod) { if(!m.hasCode) { return } collapseJumpOnlyBlocks(m) - if (settings.optimise.value) + if (settings.optimise) elimUnreachableBlocks(m) icodes checkValid m } diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index 8f439fc800..aaf2c55dcd 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -82,7 +82,7 @@ abstract class ClosureElimination extends SubComponent { * */ class ClosureElim { - def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) { + def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim) { log(s"Analyzing ${cls.methods.size} methods in $cls.") cls.methods foreach { m => analyzeMethod(m) diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index 7187bacb06..ff93206ffd 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -41,7 +41,7 @@ abstract class ConstantOptimization extends SubComponent { def name = phaseName override def apply(c: IClass) { - if (settings.YconstOptimization.value) { + if (settings.YconstOptimization) { val analyzer = new ConstantOptimizer analyzer optimizeClass c } diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 7042d7a042..1026e95fac 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ 
b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -33,7 +33,7 @@ abstract class DeadCodeElimination extends SubComponent { val dce = new DeadCode() override def apply(c: IClass) { - if (settings.Xdce.value) + if (settings.Xdce) dce.analyzeClass(c) } } diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala index dcf0590951..cecabda171 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -88,7 +88,7 @@ abstract class InlineExceptionHandlers extends SubComponent { /** Apply exception handler inlining to a class */ override def apply(c: IClass): Unit = - if (settings.inlineHandlers.value) { + if (settings.inlineHandlers) { val startTime = System.currentTimeMillis currentClass = c diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 555c79e75e..a6eedbd07e 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -265,7 +265,7 @@ abstract class Inliners extends SubComponent { } def analyzeClass(cls: IClass): Unit = - if (settings.inline.value) { + if (settings.inline) { inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls") this.currentIClazz = cls @@ -319,7 +319,7 @@ abstract class Inliners extends SubComponent { * */ def analyzeMethod(m: IMethod): Unit = { // m.normalize - if (settings.debug.value) + if (settings.debug) inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName) val sizeBeforeInlining = m.code.blockCount @@ -383,7 +383,7 @@ abstract class Inliners extends SubComponent { def warnNoInline(reason: String): Boolean = { def msg = "Could not inline required method %s because %s.".format(i.method.unexpandedName.decode, 
reason) - if (settings.debug.value) + if (settings.debug) inlineLog("fail", i.method.fullName, reason) if (shouldWarn) warn(i.pos, msg) @@ -935,7 +935,7 @@ abstract class Inliners extends SubComponent { // add exception handlers of the callee caller addHandlers (inc.handlers map translateExh) assert(pending.isEmpty, "Pending NEW elements: " + pending) - if (settings.debug.value) icodes.checkValid(caller.m) + if (settings.debug) icodes.checkValid(caller.m) } def isStampedForInlining(stackLength: Int): InlineSafetyInfo = { @@ -958,6 +958,7 @@ abstract class Inliners extends SubComponent { if(isInlineForbidden) { rs ::= "is annotated @noinline" } if(inc.isSynchronized) { rs ::= "is synchronized method" } if(inc.m.bytecodeHasEHs) { rs ::= "bytecode contains exception handlers / finally clause" } // SI-6188 + if(inc.m.bytecodeHasInvokeDynamic) { rs ::= "bytecode contains invoke dynamic" } if(rs.isEmpty) null else rs.mkString("", ", and ", "") } diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index cf40fe90fa..f1b1d1a9a7 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -755,13 +755,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val pos = in.currentPos val name = identForType() val (statics, body) = typeBody(AT, name) - def getValueMethodType(tree: Tree) = tree match { - case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate) - case _ => None - } - var templ = makeTemplate(annotationParents, body) - for (stat <- templ.body; tpt <- getValueMethodType(stat)) - templ = makeTemplate(annotationParents, makeConstructor(List(tpt)) :: templ.body) + val templ = makeTemplate(annotationParents, body) addCompanionObject(statics, atPos(pos) { ClassDef(mods, name, List(), templ) }) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala 
index 71b97e86a6..a591482392 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -62,7 +62,7 @@ trait Plugins { def withPlug = plug :: pick(tail, plugNames + plug.name, phaseNames ++ plugPhaseNames) lazy val commonPhases = phaseNames intersect plugPhaseNames - def note(msg: String): Unit = if (settings.verbose.value) inform(msg format plug.name) + def note(msg: String): Unit = if (settings.verbose) inform(msg format plug.name) def fail(msg: String) = { note(msg) ; withoutPlug } if (plugNames contains plug.name) diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index 44670ea578..a67c207820 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -43,7 +43,7 @@ abstract class AbstractReporter extends Reporter { if (!hidden || isPromptSet) { severity.count += 1 display(pos, msg, severity) - } else if (settings.debug.value) { + } else if (settings.debug) { severity.count += 1 display(pos, "[ suppressed ] " + msg, severity) } diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 61ac07d18f..250feb69bf 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -31,7 +31,7 @@ abstract class SymbolLoaders { } protected def signalError(root: Symbol, ex: Throwable) { - if (settings.debug.value) ex.printStackTrace() + if (settings.debug) ex.printStackTrace() globalError(ex.getMessage() match { case null => "i/o error while loading " + root.name case msg => "error while loading " + root.name + ", " + msg @@ -104,8 +104,15 @@ abstract class SymbolLoaders { val clazz = enterClass(root, name, completer) val module = enterModule(root, name, completer) if (!clazz.isAnonymousClass) { - 
assert(clazz.companionModule == module, module) - assert(module.companionClass == clazz, clazz) + // Diagnostic for SI-7147 + def msg: String = { + def symLocation(sym: Symbol) = if (sym == null) "null" else s"${clazz.fullLocationString} (from ${clazz.associatedFile})" + sm"""Inconsistent class/module symbol pair for `$name` loaded from ${symLocation(root)}. + |clazz = ${symLocation(clazz)}; clazz.companionModule = ${clazz.companionModule} + |module = ${symLocation(module)}; module.companionClass = ${module.companionClass}""" + } + assert(clazz.companionModule == module, msg) + assert(module.companionClass == clazz, msg) } } @@ -136,10 +143,10 @@ abstract class SymbolLoaders { ((classRep.binary, classRep.source) : @unchecked) match { case (Some(bin), Some(src)) if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) => - if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path) + if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path) global.loaders.enterToplevelsFromSource(owner, classRep.name, src) case (None, Some(src)) => - if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path) + if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path) global.loaders.enterToplevelsFromSource(owner, classRep.name, src) case (Some(bin), _) => global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin)) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index 035244e421..daaa625164 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -127,7 +127,7 @@ trait SymbolTrackers { else " (" + Flags.flagsToString(masked) + ")" } def symString(sym: Symbol) = ( - if (settings.debug.value && sym.hasCompleteInfo) { + if (settings.debug && 
sym.hasCompleteInfo) { val s = sym.defString take 240 if (s.length == 240) s + "..." else s } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 1206748b24..a8a47205dd 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -12,9 +12,11 @@ import java.lang.Integer.toHexString import scala.collection.{ mutable, immutable } import scala.collection.mutable.{ ListBuffer, ArrayBuffer } import scala.annotation.switch +import scala.reflect.internal.{ JavaAccFlags } import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs} import scala.tools.nsc.io.AbstractFile + /** This abstract class implements a class file parser. * * @author Martin Odersky @@ -23,6 +25,7 @@ import scala.tools.nsc.io.AbstractFile abstract class ClassfileParser { val global: Global import global._ + import definitions._ import scala.reflect.internal.ClassfileConstants._ import Flags._ @@ -40,21 +43,41 @@ abstract class ClassfileParser { protected var classTParams = Map[Name,Symbol]() protected var srcfile0 : Option[AbstractFile] = None protected def moduleClass: Symbol = staticModule.moduleClass + private var sawPrivateConstructor = false + + private def ownerForFlags(jflags: JavaAccFlags) = if (jflags.isStatic) moduleClass else clazz def srcfile = srcfile0 + private def optimized = global.settings.optimise.value private def currentIsTopLevel = !(currentClass.decodedName containsChar '$') + // u1, u2, and u4 are what these data types are called in the JVM spec. + // They are an unsigned byte, unsigned char, and unsigned int respectively. + // We bitmask u1 into an Int to make sure it's 0-255 (and u1 isn't used + // for much beyond tags) but leave u2 alone as it's already unsigned. 
+ protected final def u1(): Int = in.nextByte & 0xFF + protected final def u2(): Char = in.nextChar + protected final def u4(): Int = in.nextInt + + private def readInnerClassFlags() = readClassFlags() + private def readClassFlags() = JavaAccFlags classFlags u2 + private def readMethodFlags() = JavaAccFlags methodFlags u2 + private def readFieldFlags() = JavaAccFlags fieldFlags u2 + private def readTypeName() = readName().toTypeName + private def readName() = pool getName u2 + private def readType() = pool getType u2 + private object unpickler extends scala.reflect.internal.pickling.UnPickler { val global: ClassfileParser.this.global.type = ClassfileParser.this.global } private def handleMissing(e: MissingRequirementError) = { - if (settings.debug.value) e.printStackTrace + if (settings.debug) e.printStackTrace throw new IOException("Missing dependency '" + e.req + "', required by " + in.file) } private def handleError(e: Exception) = { - if (settings.debug.value) e.printStackTrace() + if (settings.debug) e.printStackTrace() throw new IOException("class file '%s' is broken\n(%s/%s)".format( in.file, e.getClass, @@ -106,65 +129,60 @@ abstract class ClassfileParser { } private def parseHeader() { - val magic = in.nextInt + val magic = u4 if (magic != JAVA_MAGIC) - throw new IOException("class file '" + in.file + "' " - + "has wrong magic number 0x" + toHexString(magic) - + ", should be 0x" + toHexString(JAVA_MAGIC)) - val minorVersion = in.nextChar.toInt - val majorVersion = in.nextChar.toInt - if ((majorVersion < JAVA_MAJOR_VERSION) || - ((majorVersion == JAVA_MAJOR_VERSION) && - (minorVersion < JAVA_MINOR_VERSION))) - throw new IOException("class file '" + in.file + "' " - + "has unknown version " - + majorVersion + "." + minorVersion - + ", should be at least " - + JAVA_MAJOR_VERSION + "." 
+ JAVA_MINOR_VERSION) + abort(s"class file ${in.file} has wrong magic number 0x${toHexString(magic)}") + + val minor, major = u2 + if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION) + abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") } class ConstantPool { - private val len = in.nextChar - private val starts = new Array[Int](len) - private val values = new Array[AnyRef](len) + private val len = u2 + private val starts = new Array[Int](len) + private val values = new Array[AnyRef](len) private val internalized = new Array[Name](len) { var i = 1 while (i < starts.length) { starts(i) = in.bp i += 1 - (in.nextByte.toInt: @switch) match { - case CONSTANT_UTF8 | CONSTANT_UNICODE => - in.skip(in.nextChar) - case CONSTANT_CLASS | CONSTANT_STRING => - in.skip(2) - case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF - | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT => - in.skip(4) - case CONSTANT_LONG | CONSTANT_DOUBLE => - in.skip(8) - i += 1 - case _ => - errorBadTag(in.bp - 1) + (u1.toInt: @switch) match { + case CONSTANT_UTF8 | CONSTANT_UNICODE => in skip u2 + case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => in skip 2 + case CONSTANT_METHODHANDLE => in skip 3 + case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF => in skip 4 + case CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT => in skip 4 + case CONSTANT_INVOKEDYNAMIC => in skip 4 + case CONSTANT_LONG | CONSTANT_DOUBLE => in skip 8 ; i += 1 + case _ => errorBadTag(in.bp - 1) } } } - /** Return the name found at given index. 
*/ - def getName(index: Int): Name = { - if (index <= 0 || len <= index) - errorBadIndex(index) + def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = { + values(idx) = value + value + } - values(index) match { + def firstExpecting(index: Int, expected: Int): Int = { + val start = starts(index) + val first = in.buf(start).toInt + if (first == expected) start + 1 + else this errorBadTag start + } + + /** Return the name found at given index. */ + def getName(index: Int): Name = ( + if (index <= 0 || len <= index) errorBadIndex(index) + else values(index) match { case name: Name => name - case null => - val start = starts(index) - if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start) - val name = newTermName(in.buf, start + 3, in.getChar(start + 1)) - values(index) = name - name + case _ => + val start = firstExpecting(index, CONSTANT_UTF8) + recordAtIndex(newTermName(in.buf, start + 2, in.getChar(start)), index) } - } + ) /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */ def getExternalName(index: Int): Name = { @@ -179,28 +197,23 @@ abstract class ClassfileParser { def getClassSymbol(index: Int): Symbol = { if (index <= 0 || len <= index) errorBadIndex(index) - var c = values(index).asInstanceOf[Symbol] - if (c eq null) { - val start = starts(index) - if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start) - val name = getExternalName(in.getChar(start + 1)) - if (nme.isModuleName(name)) - c = rootMirror.getModuleByName(name.dropModule) - else - c = classNameToSymbol(name) - - values(index) = c + values(index) match { + case sym: Symbol => sym + case _ => + val result = getClassName(index) match { + case name if nme.isModuleName(name) => rootMirror getModuleByName name.dropModule + case name => classNameToSymbol(name) + } + recordAtIndex(result, index) } - c } /** Return the external name of the class info structure found at 'index'. * Use 'getClassSymbol' if the class is sure to be a top-level class. 
*/ def getClassName(index: Int): Name = { - val start = starts(index) - if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start) - getExternalName(in.getChar(start + 1)) + val start = firstExpecting(index, CONSTANT_CLASS) + getExternalName(in getChar start) } /** Return the symbol of the class member at `index`. @@ -276,94 +289,66 @@ abstract class ClassfileParser { */ private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = { if (index <= 0 || len <= index) errorBadIndex(index) - var p = values(index).asInstanceOf[(Name, Type)] - if (p eq null) { - val start = starts(index) - if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start) - val name = getName(in.getChar(start + 1).toInt) - // create a dummy symbol for method types - val dummySym = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos) - var tpe = getType(dummySym, in.getChar(start + 3).toInt) - - // fix the return type, which is blindly set to the class currently parsed - if (name == nme.CONSTRUCTOR) - tpe match { - case MethodType(formals, restpe) => - tpe = MethodType(formals, ownerTpe) + (values(index): @unchecked) match { + case p: ((Name, Type)) => p + case _ => + val start = firstExpecting(index, CONSTANT_NAMEANDTYPE) + val name = getName(in.getChar(start).toInt) + // create a dummy symbol for method types + val dummy = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos) + val tpe = getType(dummy, in.getChar(start + 2).toInt) + // fix the return type, which is blindly set to the class currently parsed + val restpe = tpe match { + case MethodType(formals, _) if name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe) + case _ => tpe } - - p = (name, tpe) + ((name, restpe)) } - p } /** Return the type of a class constant entry. Since * arrays are considered to be class types, they might * appear as entries in 'newarray' or 'cast' opcodes. 
*/ - def getClassOrArrayType(index: Int): Type = { + def getClassOrArrayType(index: Int): Type = ( if (index <= 0 || len <= index) errorBadIndex(index) - val value = values(index) - var c: Type = null - if (value eq null) { - val start = starts(index) - if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start) - val name = getExternalName(in.getChar(start + 1)) - if (name.charAt(0) == ARRAY_TAG) { - c = sigToType(null, name) - values(index) = c - } else { - val sym = classNameToSymbol(name) - /*if (name.endsWith("$")) definitions.getModule(name.subName(0, name.length - 1)) - else if (name.endsWith("$class")) definitions.getModule(name) - else definitions.getClass(name)*/ - values(index) = sym - c = sym.tpe - } - } else c = value match { - case tp: Type => tp - case cls: Symbol => cls.tpe + else values(index) match { + case tp: Type => tp + case cls: Symbol => cls.tpe_* + case _ => + val name = getClassName(index) + name charAt 0 match { + case ARRAY_TAG => recordAtIndex(sigToType(null, name), index) + case _ => recordAtIndex(classNameToSymbol(name), index).tpe_* + } } - c - } - - def getType(index: Int): Type = getType(null, index) - - def getType(sym: Symbol, index: Int): Type = - sigToType(sym, getExternalName(index)) + ) - def getSuperClass(index: Int): Symbol = - if (index == 0) definitions.AnyClass else getClassSymbol(index) + def getType(index: Int): Type = getType(null, index) + def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index)) + def getSuperClass(index: Int): Symbol = if (index == 0) AnyClass else getClassSymbol(index) - def getConstant(index: Int): Constant = { + private def createConstant(index: Int): Constant = { + val start = starts(index) + Constant((in.buf(start).toInt: @switch) match { + case CONSTANT_STRING => getName(in.getChar(start + 1).toInt).toString + case CONSTANT_INTEGER => in.getInt(start + 1) + case CONSTANT_FLOAT => in.getFloat(start + 1) + case CONSTANT_LONG => in.getLong(start + 1) + case 
CONSTANT_DOUBLE => in.getDouble(start + 1) + case CONSTANT_CLASS => getClassOrArrayType(index).typeSymbol.tpe_* // !!! Is this necessary or desirable? + case _ => errorBadTag(start) + }) + } + def getConstant(index: Int): Constant = ( if (index <= 0 || len <= index) errorBadIndex(index) - var value = values(index) - if (value eq null) { - val start = starts(index) - value = (in.buf(start).toInt: @switch) match { - case CONSTANT_STRING => - Constant(getName(in.getChar(start + 1).toInt).toString) - case CONSTANT_INTEGER => - Constant(in.getInt(start + 1)) - case CONSTANT_FLOAT => - Constant(in.getFloat(start + 1)) - case CONSTANT_LONG => - Constant(in.getLong(start + 1)) - case CONSTANT_DOUBLE => - Constant(in.getDouble(start + 1)) - case CONSTANT_CLASS => - getClassOrArrayType(index).typeSymbol - case _ => - errorBadTag(start) - } - values(index) = value - } - value match { - case ct: Constant => ct - case cls: Symbol => Constant(cls.tpe_*) - case arr: Type => Constant(arr) + else values(index) match { + case const: Constant => const + case sym: Symbol => Constant(sym.tpe_*) + case tpe: Type => Constant(tpe) + case _ => recordAtIndex(createConstant(index), index) } - } + ) private def getSubArray(bytes: Array[Byte]): Array[Byte] = { val decodedLength = ByteCodecs.decode(bytes) @@ -372,46 +357,41 @@ abstract class ClassfileParser { arr } - def getBytes(index: Int): Array[Byte] = { + def getBytes(index: Int): Array[Byte] = ( if (index <= 0 || len <= index) errorBadIndex(index) - var value = values(index).asInstanceOf[Array[Byte]] - if (value eq null) { - val start = starts(index) - if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start) - val len = in.getChar(start + 1) - val bytes = new Array[Byte](len) - System.arraycopy(in.buf, start + 3, bytes, 0, len) - value = getSubArray(bytes) - values(index) = value + else values(index) match { + case xs: Array[Byte] => xs + case _ => + val start = firstExpecting(index, CONSTANT_UTF8) + val len = in getChar start + val 
bytes = new Array[Byte](len)
+          System.arraycopy(in.buf, start + 2, bytes, 0, len)
+          recordAtIndex(getSubArray(bytes), index)
       }
-      value
-    }
+    )
 
     def getBytes(indices: List[Int]): Array[Byte] = {
-      assert(!indices.isEmpty, indices)
-      var value = values(indices.head).asInstanceOf[Array[Byte]]
-      if (value eq null) {
-        val bytesBuffer = ArrayBuffer.empty[Byte]
-        for (index <- indices) {
-          if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
-          val start = starts(index)
-          if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
-          val len = in.getChar(start + 1)
-          bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
-        }
-        value = getSubArray(bytesBuffer.toArray)
-        values(indices.head) = value
+      val head = indices.head
+      values(head) match {
+        case xs: Array[Byte] => xs
+        case _ =>
+          val arr: Array[Byte] = indices.toArray flatMap { index =>
+            if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+            val start = firstExpecting(index, CONSTANT_UTF8)
+            val len = in getChar start
+            in.buf drop start + 2 take len
+          }
+          recordAtIndex(getSubArray(arr), head)
       }
-      value
     }
 
     /** Throws an exception signaling a bad constant index. */
     private def errorBadIndex(index: Int) =
-      throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+      abort(s"bad constant pool index: $index at pos: ${in.bp}")
 
     /** Throws an exception signaling a bad tag at given address. */
     private def errorBadTag(start: Int) =
-      throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
+      abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
   }
 
   /** Try to force the chain of enclosing classes for the given name. 
Otherwise @@ -490,30 +470,27 @@ abstract class ClassfileParser { catch { case _: FatalError => loadClassSymbol(name) } } - var sawPrivateConstructor = false - def parseClass() { - val jflags = in.nextChar - val isAnnotation = hasAnnotation(jflags) - val sflags = toScalaClassFlags(jflags) - val nameIdx = in.nextChar - currentClass = pool.getClassName(nameIdx) + val jflags = readClassFlags() + val sflags = jflags.toScalaFlags + val nameIdx = u2 + currentClass = pool.getClassName(nameIdx) /* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. * Updates the read pointer of 'in'. */ def parseParents: List[Type] = { if (isScala) { - in.nextChar // skip superclass - val ifaces = in.nextChar - in.bp += ifaces * 2 // .. and iface count interfaces - List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information + u2 // skip superclass + val ifaces = u2 + in.bp += ifaces * 2 // .. and iface count interfaces + List(AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information } else raiseLoaderLevel { - val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe } - else pool.getSuperClass(in.nextChar).tpe_* - val ifaceCount = in.nextChar - var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe_* - if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces + val superType = if (jflags.isAnnotation) { u2; AnnotationClass.tpe } + else pool.getSuperClass(u2).tpe_* + val ifaceCount = u2 + var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(u2).tpe_* + if (jflags.isAnnotation) ifaces ::= ClassfileAnnotationClass.tpe superType :: ifaces } } @@ -543,21 +520,20 @@ abstract class ClassfileParser { skipMembers() // methods if (!isScala) { clazz setFlag sflags - importPrivateWithinFromJavaFlags(clazz, jflags) - importPrivateWithinFromJavaFlags(staticModule, jflags) - clazz.setInfo(classInfo) + 
propagatePackageBoundary(jflags, clazz, staticModule)
+      clazz setInfo classInfo
       moduleClass setInfo staticInfo
-      staticModule.setInfo(moduleClass.tpe)
-      staticModule.setFlag(JAVA)
-      staticModule.moduleClass.setFlag(JAVA)
+      staticModule setInfo moduleClass.tpe
+      staticModule setFlag JAVA
+      staticModule.moduleClass setFlag JAVA
 
       // attributes now depend on having infos set already
       parseAttributes(clazz, classInfo)
 
       def queueLoad() {
         in.bp = curbp
-        0 until in.nextChar foreach (_ => parseField())
+        0 until u2 foreach (_ => parseField())
         sawPrivateConstructor = false
-        0 until in.nextChar foreach (_ => parseMethod())
+        0 until u2 foreach (_ => parseMethod())
         val needsConstructor = (
              !sawPrivateConstructor
          && !(instanceScope containsName nme.CONSTRUCTOR)
@@ -593,26 +569,28 @@
   }
 
   def parseField() {
-    val jflags = in.nextChar
-    val sflags = toScalaFieldFlags(jflags)
-    if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
+    val jflags = readFieldFlags()
+    val sflags = jflags.toScalaFlags
+
+    if ((sflags & PRIVATE) != 0L && !optimized) {
       in.skip(4); skipAttributes()
     } else {
-      val name = pool.getName(in.nextChar)
-      val info = pool.getType(in.nextChar)
-      val sym = getOwner(jflags).newValue(name.toTermName, NoPosition, sflags)
-      val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+      val name = readName()
+      val info = readType()
+      val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags)
+      // Note: the info may be overwritten later with a generic signature
+      // parsed from SignatureATTR
      sym setInfo {
-        if (isEnum) ConstantType(Constant(sym))
+        if (jflags.isEnum) ConstantType(Constant(sym))
        else info
      }
-      importPrivateWithinFromJavaFlags(sym, jflags)
+      propagatePackageBoundary(jflags, sym)
      parseAttributes(sym, info)
-      getScope(jflags).enter(sym)
+      getScope(jflags) enter sym
 
      // sealed java enums
-      if (isEnum) {
+      if (jflags.isEnum) {
        val enumClass = sym.owner.linkedClassOfClass
        if (!enumClass.isSealed)
          enumClass setFlag 
(SEALED | ABSTRACT) @@ -623,26 +601,27 @@ abstract class ClassfileParser { } def parseMethod() { - val jflags = in.nextChar.toInt - val sflags = toScalaMethodFlags(jflags) - if (isPrivate(jflags) && !global.settings.optimise.value) { - val name = pool.getName(in.nextChar) + val jflags = readMethodFlags() + val sflags = jflags.toScalaFlags + if (jflags.isPrivate && !optimized) { + val name = readName() if (name == nme.CONSTRUCTOR) sawPrivateConstructor = true in.skip(2); skipAttributes() - } else { - if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) { + } + else { + if ((sflags & PRIVATE) != 0L && optimized) { in.skip(4); skipAttributes() } else { - val name = pool.getName(in.nextChar) - val sym = getOwner(jflags).newMethod(name.toTermName, NoPosition, sflags) - var info = pool.getType(sym, (in.nextChar)) + val name = readName() + val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags) + var info = pool.getType(sym, u2) if (name == nme.CONSTRUCTOR) info match { case MethodType(params, restpe) => // if this is a non-static inner class, remove the explicit outer parameter val newParams = innerClasses getEntry currentClass match { - case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) => + case Some(entry) if !isScalaRaw && !entry.jflags.isStatic => /* About `clazz.owner.isPackage` below: SI-5957 * For every nested java class A$B, there are two symbols in the scala compiler. * 1. 
created by SymbolLoader, because of the existence of the A$B.class file, owner: package
@@ -657,13 +636,15 @@
               }
               info = MethodType(newParams, clazz.tpe)
             }
-            sym.setInfo(info)
-            importPrivateWithinFromJavaFlags(sym, jflags)
+            // Note: the info may be overwritten later with a generic signature
+            // parsed from SignatureATTR
+            sym setInfo info
+            propagatePackageBoundary(jflags, sym)
             parseAttributes(sym, info)
-            if ((jflags & JAVA_ACC_VARARGS) != 0) {
-              sym.setInfo(arrayToRepeated(sym.info))
-            }
-            getScope(jflags).enter(sym)
+            if (jflags.isVarargs)
+              sym modifyInfo arrayToRepeated
+
+            getScope(jflags) enter sym
           }
         }
       }
@@ -683,15 +664,15 @@
     def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
       val tag = sig.charAt(index); index += 1
       tag match {
-        case BYTE_TAG   => definitions.ByteClass.tpe
-        case CHAR_TAG   => definitions.CharClass.tpe
-        case DOUBLE_TAG => definitions.DoubleClass.tpe
-        case FLOAT_TAG  => definitions.FloatClass.tpe
-        case INT_TAG    => definitions.IntClass.tpe
-        case LONG_TAG   => definitions.LongClass.tpe
-        case SHORT_TAG  => definitions.ShortClass.tpe
-        case VOID_TAG   => definitions.UnitClass.tpe
-        case BOOL_TAG   => definitions.BooleanClass.tpe
+        case BYTE_TAG   => ByteClass.tpe
+        case CHAR_TAG   => CharClass.tpe
+        case DOUBLE_TAG => DoubleClass.tpe
+        case FLOAT_TAG  => FloatClass.tpe
+        case INT_TAG    => IntClass.tpe
+        case LONG_TAG   => LongClass.tpe
+        case SHORT_TAG  => ShortClass.tpe
+        case VOID_TAG   => UnitClass.tpe
+        case BOOL_TAG   => BooleanClass.tpe
         case 'L' =>
           def processInner(tp: Type): Type = tp match {
             case TypeRef(pre, sym, args) if (!sym.isStatic) =>
@@ -716,7 +697,7 @@
               val tp = sig2type(tparams, skiptvs)
               // sig2type seems to return AnyClass regardless of the situation:
               // we don't want Any as a LOWER bound. 
- if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty + if (tp.typeSymbol == AnyClass) TypeBounds.empty else TypeBounds.lower(tp) case '*' => TypeBounds.empty } @@ -737,7 +718,7 @@ abstract class ClassfileParser { // or we'll create a boatload of needless existentials. else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp // raw type - existentially quantify all type parameters - else logResult(s"raw type from $classSym")(definitions.unsafeClassExistentialType(classSym)) + else logResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym)) case tp => assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp") tp @@ -750,7 +731,9 @@ abstract class ClassfileParser { accept('.') val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName val clazz = tpe.member(name) - tpe = processClassType(processInner(clazz.tpe)) + val dummyArgs = Nil // the actual arguments are added in processClassType + val inner = typeRef(pre = tpe, sym = clazz, args = dummyArgs) + tpe = processClassType(inner) } accept(';') tpe @@ -763,11 +746,11 @@ abstract class ClassfileParser { // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail // see also RestrictJavaArraysMap (when compiling java sources directly) - if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) { - elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe)) + if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectClass.tpe)) { + elemtp = intersectionType(List(elemtp, ObjectClass.tpe)) } - definitions.arrayType(elemtp) + arrayType(elemtp) case '(' => // we need a method symbol. given in line 486 by calling getType(methodSym, ..) 
assert(sym ne null, sig) @@ -785,7 +768,7 @@ abstract class ClassfileParser { case 'T' => val n = subName(';'.==).toTypeName index += 1 - if (skiptvs) definitions.AnyClass.tpe + if (skiptvs) AnyClass.tpe else tparams(n).typeConstructor } } // sig2type(tparams, skiptvs) @@ -838,27 +821,23 @@ abstract class ClassfileParser { GenPolyType(ownTypeParams, tpe) } // sigToType - class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter { - override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") } - } - def parseAttributes(sym: Symbol, symtype: Type) { def convertTo(c: Constant, pt: Type): Constant = { - if (pt.typeSymbol == definitions.BooleanClass && c.tag == IntTag) + if (pt.typeSymbol == BooleanClass && c.tag == IntTag) Constant(c.value != 0) else c convertTo pt } def parseAttribute() { - val attrName = pool.getName(in.nextChar).toTypeName - val attrLen = in.nextInt + val attrName = readTypeName() + val attrLen = u4 attrName match { case tpnme.SignatureATTR => if (!isScala && !isScalaRaw) { - val sig = pool.getExternalName(in.nextChar) + val sig = pool.getExternalName(u2) val newType = sigToType(sym, sig) sym.setInfo(newType) - if (settings.debug.value && settings.verbose.value) + if (settings.debug && settings.verbose) println("" + sym + "; signature = " + sig + " type = " + newType) } else in.skip(attrLen) @@ -870,10 +849,10 @@ abstract class ClassfileParser { in.skip(attrLen) case tpnme.DeprecatedATTR => val arg = Literal(Constant("see corresponding Javadoc for more information.")) - sym.addAnnotation(definitions.DeprecatedAttr, arg, Literal(Constant(""))) + sym.addAnnotation(DeprecatedAttr, arg, Literal(Constant(""))) in.skip(attrLen) case tpnme.ConstantValueATTR => - val c = pool.getConstant(in.nextChar) + val c = pool.getConstant(u2) val c1 = convertTo(c, symtype) if (c1 ne null) sym.setInfo(ConstantType(c1)) else println("failure to convert " + c + " to " + symtype); //debug @@ 
-887,7 +866,7 @@ abstract class ClassfileParser { isScalaRaw = true // Attribute on methods of java annotation classes when that method has a default case tpnme.AnnotationDefaultATTR => - sym.addAnnotation(definitions.AnnotationDefaultAttr) + sym.addAnnotation(AnnotationDefaultAttr) in.skip(attrLen) // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME case tpnme.RuntimeAnnotationATTR => @@ -917,7 +896,7 @@ abstract class ClassfileParser { parseExceptions(attrLen) case tpnme.SourceFileATTR => - val srcfileLeaf = pool.getName(in.nextChar).toString.trim + val srcfileLeaf = readName().toString.trim val srcpath = sym.enclosingPackage match { case NoSymbol => srcfileLeaf case rootMirror.EmptyPackage => srcfileLeaf @@ -930,8 +909,8 @@ abstract class ClassfileParser { } def parseAnnotArg: Option[ClassfileAnnotArg] = { - val tag = in.nextByte.toChar - val index = in.nextChar + val tag = u1 + val index = u2 tag match { case STRING_TAG => Some(LiteralAnnotArg(Constant(pool.getName(index).toString))) @@ -942,7 +921,7 @@ abstract class ClassfileParser { Some(LiteralAnnotArg(Constant(pool.getType(index)))) case ENUM_TAG => val t = pool.getType(index) - val n = pool.getName(in.nextChar) + val n = readName() val s = t.typeSymbol.companionModule.info.decls.lookup(n) assert(s != NoSymbol, t) Some(LiteralAnnotArg(Constant(s))) @@ -962,20 +941,20 @@ abstract class ClassfileParser { } def parseScalaSigBytes: Option[ScalaSigBytes] = { - val tag = in.nextByte.toChar + val tag = u1 assert(tag == STRING_TAG, tag) - Some(ScalaSigBytes(pool getBytes in.nextChar)) + Some(ScalaSigBytes(pool getBytes u2)) } def parseScalaLongSigBytes: Option[ScalaSigBytes] = { - val tag = in.nextByte.toChar + val tag = u1 assert(tag == ARRAY_TAG, tag) - val stringCount = in.nextChar + val stringCount = u2 val entries = for (i <- 0 until stringCount) yield { - val stag = in.nextByte.toChar + val stag = u1 assert(stag == STRING_TAG, stag) - in.nextChar.toInt + u2.toInt } 
Some(ScalaSigBytes(pool.getBytes(entries.toList))) } @@ -985,20 +964,20 @@ abstract class ClassfileParser { */ def parseAnnotation(attrNameIndex: Char): Option[AnnotationInfo] = try { val attrType = pool.getType(attrNameIndex) - val nargs = in.nextChar + val nargs = u2 val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)] var hasError = false for (i <- 0 until nargs) { - val name = pool.getName(in.nextChar) + val name = readName() // The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is // available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature // is encoded as a string because of limitations in the Java class file format. - if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes)) + if ((attrType == ScalaSignatureAnnotation.tpe) && (name == nme.bytes)) parseScalaSigBytes match { case Some(c) => nvpairs += ((name, c)) case None => hasError = true } - else if ((attrType == definitions.ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes)) + else if ((attrType == ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes)) parseScalaLongSigBytes match { case Some(c) => nvpairs += ((name, c)) case None => hasError = true @@ -1021,7 +1000,7 @@ abstract class ClassfileParser { // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), // and that should never be swallowed silently. warning("Caught: " + ex + " while parsing annotations in " + in.file) - if (settings.debug.value) ex.printStackTrace() + if (settings.debug) ex.printStackTrace() None // ignore malformed annotations } @@ -1031,10 +1010,10 @@ abstract class ClassfileParser { * thrown by a method. 
*/ def parseExceptions(len: Int) { - val nClasses = in.nextChar + val nClasses = u2 for (n <- 0 until nClasses) { // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065) - val cls = pool.getClassSymbol(in.nextChar.toInt) + val cls = pool.getClassSymbol(u2) // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation // and that method requires Symbol to be forced to give the right answers, see SI-7107 for details cls.initialize @@ -1045,13 +1024,13 @@ abstract class ClassfileParser { /* Parse a sequence of annotations and attaches them to the * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */ def parseAnnotations(len: Int): Option[AnnotationInfo] = { - val nAttr = in.nextChar + val nAttr = u2 var scalaSigAnnot: Option[AnnotationInfo] = None for (n <- 0 until nAttr) - parseAnnotation(in.nextChar) match { - case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) => + parseAnnotation(u2) match { + case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) => scalaSigAnnot = Some(scalaSig) - case Some(scalaSig) if (scalaSig.atp == definitions.ScalaLongSignatureAnnotation.tpe) => + case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) => scalaSigAnnot = Some(scalaSig) case Some(annot) => sym.addAnnotation(annot) @@ -1061,7 +1040,7 @@ abstract class ClassfileParser { } // begin parseAttributes - for (i <- 0 until in.nextChar) parseAttribute() + for (i <- 0 until u2) parseAttribute() } /** Enter own inner classes in the right scope. 
It needs the scopes to be set up, @@ -1071,11 +1050,12 @@ abstract class ClassfileParser { def className(name: Name): Name = name.subName(name.lastPos('.') + 1, name.length) - def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) { + def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) { + def jflags = entry.jflags val completer = new global.loaders.ClassfileLoader(file) val name = entry.originalName - val sflags = toScalaClassFlags(jflags) - val owner = getOwner(jflags) + val sflags = jflags.toScalaFlags + val owner = ownerForFlags(jflags) val scope = getScope(jflags) val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer @@ -1104,7 +1084,7 @@ abstract class ClassfileParser { val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse { throw new AssertionError(entry.externalName) } - enterClassAndModule(entry, file, entry.jflags) + enterClassAndModule(entry, file) } } } @@ -1117,10 +1097,10 @@ abstract class ClassfileParser { skipSuperclasses() skipMembers() // fields skipMembers() // methods - val attrs = in.nextChar + val attrs = u2 for (i <- 0 until attrs) { - val attrName = pool.getName(in.nextChar).toTypeName - val attrLen = in.nextInt + val attrName = readTypeName() + val attrLen = u4 attrName match { case tpnme.SignatureATTR => in.skip(attrLen) @@ -1134,9 +1114,10 @@ abstract class ClassfileParser { case tpnme.ScalaATTR => isScalaRaw = true case tpnme.InnerClassesATTR if !isScala => - val entries = in.nextChar.toInt + val entries = u2 for (i <- 0 until entries) { - val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt + val innerIndex, outerIndex, nameIndex = u2 + val jflags = readInnerClassFlags() if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags) } @@ -1148,14 +1129,13 @@ 
abstract class ClassfileParser { } /** An entry in the InnerClasses attribute of this class file. */ - case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) { + case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: JavaAccFlags) { def externalName = pool getClassName external def outerName = pool getClassName outer def originalName = pool getName name - def isStatic = ClassfileParser.this.isStatic(jflags) def isModule = originalName.isTermName - def scope = if (isStatic) staticScope else instanceScope - def enclosing = if (isStatic) enclModule else enclClass + def scope = if (jflags.isStatic) staticScope else instanceScope + def enclosing = if (jflags.isStatic) enclModule else enclClass // The name of the outer class, without its trailing $ if it has one. private def strippedOuter = nme stripModuleSuffix outerName @@ -1207,6 +1187,9 @@ abstract class ClassfileParser { } } + class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter { + override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") } + } class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter { override def complete(sym: Symbol) { sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun) @@ -1214,16 +1197,16 @@ abstract class ClassfileParser { } def skipAttributes() { - var attrCount: Int = in.nextChar + var attrCount: Int = u2 while (attrCount > 0) { in skip 2 - in skip in.nextInt + in skip u4 attrCount -= 1 } } def skipMembers() { - var memberCount: Int = in.nextChar + var memberCount: Int = u2 while (memberCount > 0) { in skip 6 skipAttributes() @@ -1233,17 +1216,10 @@ abstract class ClassfileParser { def skipSuperclasses() { in.skip(2) // superclass - val ifaces = in.nextChar + val ifaces = u2 in.skip(2 * ifaces) } - protected def getOwner(flags: Int): Symbol = - if (isStatic(flags)) moduleClass else clazz - - protected def 
getScope(flags: Int): Scope = - if (isStatic(flags)) staticScope else instanceScope - - private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0 - private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0 - private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0 + protected def getScope(flags: JavaAccFlags): Scope = + if (flags.isStatic) staticScope else instanceScope } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 599823b408..50487ad123 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -10,6 +10,7 @@ package classfile import scala.collection.{ mutable, immutable } import mutable.ListBuffer import ClassfileConstants._ +import scala.reflect.internal.JavaAccFlags /** ICode reader from Java bytecode. * @@ -45,26 +46,19 @@ abstract class ICodeReader extends ClassfileParser { (staticCode, instanceCode) } - /** If we're parsing a scala module, the owner of members is always - * the module symbol. 
- */ - override def getOwner(jflags: Int): Symbol = - if (isScalaModule) this.staticModule - else super.getOwner(jflags) - override def parseClass() { this.instanceCode = new IClass(clazz) this.staticCode = new IClass(staticModule) - in.nextChar - pool getClassSymbol in.nextChar + u2 + pool getClassSymbol u2 parseInnerClasses() in.skip(2) // super class - in.skip(2 * in.nextChar) // interfaces - val fieldCount = in.nextChar + in.skip(2 * u2) // interfaces + val fieldCount = u2 for (i <- 0 until fieldCount) parseField() - val methodCount = in.nextChar + val methodCount = u2 for (i <- 0 until methodCount) parseMethod() instanceCode.methods = instanceCode.methods.reverse staticCode.methods = staticCode.methods.reverse @@ -76,25 +70,31 @@ abstract class ICodeReader extends ClassfileParser { skipAttributes() } - private def parseMember(field: Boolean): (Int, Symbol) = { - val jflags = in.nextChar - val name = pool getName in.nextChar - val owner = getOwner(jflags) - val dummySym = owner.newMethod(name.toTermName, owner.pos, toScalaMethodFlags(jflags)) + private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = { + val jflags = JavaAccFlags(u2) + val name = pool getName u2 + /** If we're parsing a scala module, the owner of members is always + * the module symbol. 
+ */ + val owner = ( + if (isScalaModule) staticModule + else if (jflags.isStatic) moduleClass + else clazz + ) + val dummySym = owner.newMethod(name.toTermName, owner.pos, jflags.toScalaFlags) try { - val ch = in.nextChar + val ch = u2 val tpe = pool.getType(dummySym, ch) if ("<clinit>" == name.toString) (jflags, NoSymbol) else { - val owner = getOwner(jflags) var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe)) if (sym == NoSymbol) sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) if (sym == NoSymbol) { - sym = if (field) owner.newValue(name.toTermName, owner.pos, toScalaFieldFlags(jflags)) else dummySym + sym = if (field) owner.newValue(name.toTermName, owner.pos, jflags.toScalaFlags) else dummySym sym setInfoAndEnter tpe log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.") } @@ -126,9 +126,9 @@ abstract class ICodeReader extends ClassfileParser { this.method = new IMethod(sym) this.method.returnType = toTypeKind(sym.tpe.resultType) getCode(jflags).addMethod(this.method) - if ((jflags & JAVA_ACC_NATIVE) != 0) + if (jflags.isNative) this.method.native = true - val attributeCount = in.nextChar + val attributeCount = u2 for (i <- 0 until attributeCount) parseAttribute() } else { debuglog("Skipping non-existent method.") @@ -142,8 +142,8 @@ abstract class ICodeReader extends ClassfileParser { } def parseAttribute() { - val attrName = pool.getName(in.nextChar).toTypeName - val attrLen = in.nextInt + val attrName = pool.getName(u2).toTypeName + val attrLen = u4 attrName match { case tpnme.CodeATTR => parseByteCode() @@ -187,9 +187,9 @@ abstract class ICodeReader extends ClassfileParser { /** Parse java bytecode into ICode */ def parseByteCode() { - maxStack = in.nextChar - maxLocals = in.nextChar - val codeLength = in.nextInt + maxStack = u2 + maxLocals = u2 + val codeLength = u4 val code = new LinearCode def 
parseInstruction() { @@ -200,7 +200,7 @@ abstract class ICodeReader extends ClassfileParser { /* Parse 16 bit jump target. */ def parseJumpTarget = { size += 2 - val offset = in.nextChar.toShort + val offset = u2.toShort val target = pc + offset assert(target >= 0 && target < codeLength, "Illegal jump target: " + target) target @@ -209,14 +209,13 @@ abstract class ICodeReader extends ClassfileParser { /* Parse 32 bit jump target. */ def parseJumpTargetW: Int = { size += 4 - val offset = in.nextInt + val offset = u4 val target = pc + offset assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + "pc: " + pc + " offset: " + offset) target } - val instr = toUnsignedByte(in.nextByte) - instr match { + u1 match { case JVM.nop => parseInstruction() case JVM.aconst_null => code emit CONSTANT(Constant(null)) case JVM.iconst_m1 => code emit CONSTANT(Constant(-1)) @@ -235,17 +234,17 @@ abstract class ICodeReader extends ClassfileParser { case JVM.dconst_0 => code emit CONSTANT(Constant(0.0)) case JVM.dconst_1 => code emit CONSTANT(Constant(1.0)) - case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size += 1 - case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size += 2 - case JVM.ldc => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size += 1 - case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2 - case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2 - case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT))); size += 1 - case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1 - case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1 - case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1 + case JVM.bipush => code.emit(CONSTANT(Constant(u1))); size += 1 + case JVM.sipush => code.emit(CONSTANT(Constant(u2))); size += 2 + case JVM.ldc => 
code.emit(CONSTANT(pool.getConstant(u1))); size += 1 + case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2 + case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2 + case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u1, INT))); size += 1 + case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u1, LONG))); size += 1 + case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u1, FLOAT))); size += 1 + case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u1, DOUBLE))); size += 1 case JVM.aload => - val local = in.nextByte.toInt; size += 1 + val local = u1.toInt; size += 1 if (local == 0 && !method.isStatic) code.emit(THIS(method.symbol.owner)) else @@ -285,11 +284,11 @@ abstract class ICodeReader extends ClassfileParser { case JVM.caload => code.emit(LOAD_ARRAY_ITEM(CHAR)) case JVM.saload => code.emit(LOAD_ARRAY_ITEM(SHORT)) - case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT))); size += 1 - case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1 - case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1 - case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1 - case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, ObjectReference))); size += 1 + case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u1, INT))); size += 1 + case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u1, LONG))); size += 1 + case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u1, FLOAT))); size += 1 + case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u1, DOUBLE))); size += 1 + case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u1, ObjectReference))); size += 1 case JVM.istore_0 => code.emit(STORE_LOCAL(code.getLocal(0, INT))) case JVM.istore_1 => code.emit(STORE_LOCAL(code.getLocal(1, INT))) case JVM.istore_2 => code.emit(STORE_LOCAL(code.getLocal(2, INT))) @@ -373,9 +372,9 @@ abstract class ICodeReader extends 
ClassfileParser { case JVM.lxor => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG))) case JVM.iinc => size += 2 - val local = code.getLocal(in.nextByte, INT) + val local = code.getLocal(u1, INT) code.emit(LOAD_LOCAL(local)) - code.emit(CONSTANT(Constant(in.nextByte))) + code.emit(CONSTANT(Constant(u1))) code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT))) code.emit(STORE_LOCAL(local)) @@ -425,14 +424,14 @@ abstract class ICodeReader extends ClassfileParser { size += padding in.bp += padding assert((pc + size % 4) != 0, pc) -/* var byte1 = in.nextByte; size += 1; - while (byte1 == 0) { byte1 = in.nextByte; size += 1; } - val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte; +/* var byte1 = u1; size += 1; + while (byte1 == 0) { byte1 = u1; size += 1; } + val default = byte1 << 24 | u1 << 16 | u1 << 8 | u1; size = size + 3 */ - val default = pc + in.nextInt; size += 4 - val low = in.nextInt - val high = in.nextInt + val default = pc + u4; size += 4 + val low = u4 + val high = u4 size += 8 assert(low <= high, "Value low not <= high for tableswitch.") @@ -445,13 +444,13 @@ abstract class ICodeReader extends ClassfileParser { size += padding in.bp += padding assert((pc + size % 4) != 0, pc) - val default = pc + in.nextInt; size += 4 - val npairs = in.nextInt; size += 4 + val default = pc + u4; size += 4 + val npairs = u4; size += 4 var tags: List[List[Int]] = Nil var targets: List[Int] = Nil var i = 0 while (i < npairs) { - tags = List(in.nextInt) :: tags; size += 4 + tags = List(u4) :: tags; size += 4 targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself i += 1 } @@ -466,47 +465,54 @@ abstract class ICodeReader extends ClassfileParser { case JVM.return_ => code.emit(RETURN(UNIT)) case JVM.getstatic => - val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2 + val field = pool.getMemberSymbol(u2, static = true); size += 2 if (field.hasModuleFlag) code emit LOAD_MODULE(field) else code emit LOAD_FIELD(field, 
isStatic = true) case JVM.putstatic => - val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2 + val field = pool.getMemberSymbol(u2, static = true); size += 2 code.emit(STORE_FIELD(field, isStatic = true)) case JVM.getfield => - val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + val field = pool.getMemberSymbol(u2, static = false); size += 2 code.emit(LOAD_FIELD(field, isStatic = false)) case JVM.putfield => - val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + val field = pool.getMemberSymbol(u2, static = false); size += 2 code.emit(STORE_FIELD(field, isStatic = false)) case JVM.invokevirtual => - val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + val m = pool.getMemberSymbol(u2, static = false); size += 2 code.emit(CALL_METHOD(m, Dynamic)) case JVM.invokeinterface => - val m = pool.getMemberSymbol(in.nextChar, static = false); size += 4 + val m = pool.getMemberSymbol(u2, static = false); size += 4 in.skip(2) code.emit(CALL_METHOD(m, Dynamic)) case JVM.invokespecial => - val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + val m = pool.getMemberSymbol(u2, static = false); size += 2 val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true) else SuperCall(m.owner.name) code.emit(CALL_METHOD(m, style)) case JVM.invokestatic => - val m = pool.getMemberSymbol(in.nextChar, static = true); size += 2 + val m = pool.getMemberSymbol(u2, static = true); size += 2 if (isBox(m)) code.emit(BOX(toTypeKind(m.info.paramTypes.head))) else if (isUnbox(m)) code.emit(UNBOX(toTypeKind(m.info.resultType))) else code.emit(CALL_METHOD(m, Static(onInstance = false))) + case JVM.invokedynamic => + // TODO, this is just a place holder. 
A real implementation must parse the class constant entry + debuglog("Found JVM invokedynamic instructionm, inserting place holder ICode INVOKE_DYNAMIC.") + containsInvokeDynamic = true + val poolEntry = in.nextChar + in.skip(2) + code.emit(INVOKE_DYNAMIC(poolEntry)) case JVM.new_ => - code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar)))) + code.emit(NEW(REFERENCE(pool.getClassSymbol(u2)))) size += 2 case JVM.newarray => - val kind = in.nextByte match { + val kind = u1 match { case T_BOOLEAN => BOOL case T_CHAR => CHAR case T_FLOAT => FLOAT @@ -520,35 +526,35 @@ abstract class ICodeReader extends ClassfileParser { code.emit(CREATE_ARRAY(kind, 1)) case JVM.anewarray => - val tpe = pool.getClassOrArrayType(in.nextChar); size += 2 + val tpe = pool.getClassOrArrayType(u2); size += 2 code.emit(CREATE_ARRAY(toTypeKind(tpe), 1)) case JVM.arraylength => code.emit(CALL_PRIMITIVE(ArrayLength(ObjectReference))); // the kind does not matter case JVM.athrow => code.emit(THROW(definitions.ThrowableClass)) case JVM.checkcast => - code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2 + code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2 case JVM.instanceof => - code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2 + code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2 case JVM.monitorenter => code.emit(MONITOR_ENTER()) case JVM.monitorexit => code.emit(MONITOR_EXIT()) case JVM.wide => size += 1 - toUnsignedByte(in.nextByte) match { - case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT))); size += 2 - case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2 - case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2 - case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2 - case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 
2 - case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT))); size += 2 - case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2 - case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2 - case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2 - case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2 + u1 match { + case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u2, INT))); size += 2 + case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u2, LONG))); size += 2 + case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u2, FLOAT))); size += 2 + case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u2, DOUBLE))); size += 2 + case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(u2, ObjectReference))); size += 2 + case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u2, INT))); size += 2 + case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u2, LONG))); size += 2 + case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u2, FLOAT))); size += 2 + case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u2, DOUBLE))); size += 2 + case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u2, ObjectReference))); size += 2 case JVM.ret => sys.error("Cannot handle jsr/ret") case JVM.iinc => size += 4 - val local = code.getLocal(in.nextChar, INT) - code.emit(CONSTANT(Constant(in.nextChar))) + val local = code.getLocal(u2, INT) + code.emit(CONSTANT(Constant(u2))) code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT))) code.emit(STORE_LOCAL(local)) case _ => sys.error("Invalid 'wide' operand") @@ -556,8 +562,8 @@ abstract class ICodeReader extends ClassfileParser { case JVM.multianewarray => size += 3 - val tpe = toTypeKind(pool getClassOrArrayType in.nextChar) - val dim = in.nextByte + val tpe = toTypeKind(pool getClassOrArrayType u2) + val dim = u1 // assert(dim == 1, "Cannot handle multidimensional arrays yet.") code emit 
CREATE_ARRAY(tpe, dim) @@ -583,14 +589,14 @@ abstract class ICodeReader extends ClassfileParser { pc = 0 while (pc < codeLength) parseInstruction() - val exceptionEntries = in.nextChar.toInt + val exceptionEntries = u2.toInt code.containsEHs = (exceptionEntries != 0) var i = 0 while (i < exceptionEntries) { // skip start end PC in.skip(4) // read the handler PC - code.jmpTargets += in.nextChar + code.jmpTargets += u2 // skip the exception type in.skip(2) i += 1 @@ -626,10 +632,8 @@ abstract class ICodeReader extends ClassfileParser { /** Return the icode class that should include members with the given flags. * There are two possible classes, the static part and the instance part. */ - def getCode(flags: Int): IClass = - if (isScalaModule) staticCode - else if ((flags & JAVA_ACC_STATIC) != 0) staticCode - else instanceCode + def getCode(flags: JavaAccFlags): IClass = + if (isScalaModule || flags.isStatic) staticCode else instanceCode class LinearCode { val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer @@ -639,6 +643,7 @@ abstract class ICodeReader extends ClassfileParser { var containsDUPX = false var containsNEW = false var containsEHs = false + var containsInvokeDynamic = false def emit(i: Instruction) { instrs += ((pc, i)) @@ -657,6 +662,7 @@ abstract class ICodeReader extends ClassfileParser { val code = new Code(method) method.setCode(code) method.bytecodeHasEHs = containsEHs + method.bytecodeHasInvokeDynamic = containsInvokeDynamic var bb = code.startBlock def makeBasicBlocks: mutable.Map[Int, BasicBlock] = diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 9217bbeeb8..94880c4b2e 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -239,7 +239,7 @@ abstract class Pickler extends SubComponent { putSymbols(tparams) case AnnotatedType(annotations, underlying, selfsym) => 
putType(underlying) - if (settings.selfInAnnots.value) putSymbol(selfsym) + if (settings.selfInAnnots) putSymbol(selfsym) putAnnotations(annotations filter (_.isStatic)) case _ => throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")") @@ -643,7 +643,7 @@ abstract class Pickler extends SubComponent { annotations filter (_.isStatic) match { case Nil => writeBody(tp) // write the underlying type if there are no annotations case staticAnnots => - if (settings.selfInAnnots.value && selfsym != NoSymbol) + if (settings.selfInAnnots && selfsym != NoSymbol) writeRef(selfsym) writeRef(tp) writeRefs(staticAnnots) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 3b9cee2d88..ac18e5ba4f 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -85,7 +85,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { def transformApplyDynamic(ad: ApplyDynamic) = { val qual0 = ad.qual val params = ad.args - if (settings.logReflectiveCalls.value) + if (settings.logReflectiveCalls) unit.echo(ad.pos, "method invocation uses reflection") val typedPos = typedWithPos(ad.pos) _ @@ -473,7 +473,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { /* For testing purposes, the dynamic application's condition * can be printed-out in great detail. Remove? 
*/ - if (settings.debug.value) { + if (settings.debug) { def paramsToString(xs: Any*) = xs map (_.toString) mkString ", " val mstr = ad.symbol.tpe match { case MethodType(mparams, resType) => diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index e6cf5e6346..ac79c60254 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -66,8 +66,8 @@ abstract class Erasure extends AddInterfaces } } - override protected def verifyJavaErasure = settings.Xverify.value || settings.debug.value - def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp) + override protected def verifyJavaErasure = settings.Xverify || settings.debug + def needsJavaSig(tp: Type) = !settings.Ynogenericsig && NeedsSigCollector.collect(tp) // only refer to type params that will actually make it into the sig, this excludes: // * higher-order type parameters @@ -418,7 +418,7 @@ abstract class Erasure extends AddInterfaces |both have erased type ${exitingPostErasure(bridge.tpe)}""") } for (bc <- root.baseClasses) { - if (settings.debug.value) + if (settings.debug) exitingPostErasure(println( sm"""check bridge overrides in $bc |${bc.info.nonPrivateDecl(bridge.name)} @@ -648,7 +648,7 @@ abstract class Erasure extends AddInterfaces * @return the adapted tree */ private def adaptToType(tree: Tree, pt: Type): Tree = { - if (settings.debug.value && pt != WildcardType) + if (settings.debug && pt != WildcardType) log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug if (tree.tpe <:< pt) tree diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 124dd6c995..367825c251 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -41,26 +41,24 @@ 
abstract class ExplicitOuter extends InfoTransform private def isInner(clazz: Symbol) = !clazz.isPackageClass && !clazz.outerClass.isStaticOwner - private def haveSameOuter(parent: Type, clazz: Symbol) = parent match { - case TypeRef(pre, sym, _) => - val owner = clazz.owner + private def haveSameOuter(parent: Type, clazz: Symbol) = { + val owner = clazz.owner + val parentSym = parent.typeSymbol - //println(s"have same outer $parent $clazz $sym ${sym.owner} $owner $pre") - - sym.isClass && owner.isClass && - (owner isSubClass sym.owner) && - owner.thisType =:= pre - - case _ => false + parentSym.isClass && owner.isClass && + (owner isSubClass parentSym.owner) && + owner.thisType =:= parent.prefix } /** Does given clazz define an outer field? */ def hasOuterField(clazz: Symbol) = { - val parents = clazz.info.parents + val parent = clazz.info.firstParent - isInner(clazz) && !clazz.isTrait && { - parents.isEmpty || !haveSameOuter(parents.head, clazz) - } + // space optimization: inherit the $outer pointer from the parent class if + // we know that it will point to the correct instance. + def canReuseParentOuterField = !parent.typeSymbol.isJavaDefined && haveSameOuter(parent, clazz) + + isInner(clazz) && !clazz.isTrait && !canReuseParentOuterField } private def outerField(clazz: Symbol): Symbol = { @@ -100,6 +98,29 @@ abstract class ExplicitOuter extends InfoTransform sym setInfo clazz.outerClass.thisType } + /** + * Will the outer accessor of the `clazz` subsume the outer accessor of + * `mixin`? + * + * This arises when an inner object mixes in its companion trait. + * + * {{{ + * class C { + * trait T { C.this } // C$T$$$outer$ : C + * object T extends T { C.this } // C$T$$$outer$ : C.this.type + * } + * }}} + * + * See SI-7242. + * + */ + private def skipMixinOuterAccessor(clazz: Symbol, mixin: Symbol) = { + // Reliant on the current scheme for name expansion, the expanded name + // of the outer accessors in a trait and its companion object are the same. 
+ // If the assumption is one day falsified, run/t7424.scala will let us know. + clazz.fullName == mixin.fullName + } + /** <p> * The type transformation method: * </p> @@ -162,10 +183,14 @@ abstract class ExplicitOuter extends InfoTransform for (mc <- clazz.mixinClasses) { val mixinOuterAcc: Symbol = exitingExplicitOuter(outerAccessor(mc)) if (mixinOuterAcc != NoSymbol) { - if (decls1 eq decls) decls1 = decls.cloneScope - val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED) - newAcc setInfo (clazz.thisType memberType mixinOuterAcc) - decls1 enter newAcc + if (skipMixinOuterAccessor(clazz, mc)) + debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc") + else { + if (decls1 eq decls) decls1 = decls.cloneScope + val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED) + newAcc setInfo (clazz.thisType memberType mixinOuterAcc) + decls1 enter newAcc + } } } } @@ -370,6 +395,7 @@ abstract class ExplicitOuter extends InfoTransform val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass def mixinPrefix = (currentClass.thisType baseType mixinClass).prefix assert(outerAcc != NoSymbol, "No outer accessor for inner mixin " + mixinClass + " in " + currentClass) + assert(outerAcc.alternatives.size == 1, s"Multiple outer accessors match inner mixin $mixinClass in $currentClass : ${outerAcc.alternatives.map(_.defString)}") // I added the mixinPrefix.typeArgs.nonEmpty condition to address the // crash in SI-4970. I feel quite sure this can be improved. 
val path = ( @@ -404,7 +430,7 @@ abstract class ExplicitOuter extends InfoTransform } if (!currentClass.isTrait) for (mc <- currentClass.mixinClasses) - if (outerAccessor(mc) != NoSymbol) + if (outerAccessor(mc) != NoSymbol && !skipMixinOuterAccessor(currentClass, mc)) newDefs += mixinOuterAccessorDef(mc) } } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 8971e27bda..35df63b246 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -86,7 +86,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * Note: The `checkinit` option does not check if transient fields are initialized. */ private def needsInitFlag(sym: Symbol) = ( - settings.checkInit.value + settings.checkInit && sym.isGetter && !sym.isInitializedToDefault && !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY) @@ -125,7 +125,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe + " " + mixinClass + " " + base.info.baseClasses + "/" + bcs) while (!bcs.isEmpty && sym == NoSymbol) { - if (settings.debug.value) { + if (settings.debug) { val other = bcs.head.info.nonPrivateDecl(member.name) debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe + " " + other.isDeferred) @@ -919,7 +919,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else if (sym.isConstructor) { deriveDefDef(stat)(addInitBits(clazz, _)) } - else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) { + else if (settings.checkInit && !clazz.isTrait && sym.isSetter) { val getter = sym.getter(clazz) if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter)) deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT)) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala 
b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 91a03009bc..565dfde11a 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -79,7 +79,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ /** For a given class and concrete type arguments, give its specialized class */ - val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap + val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol] /** Map a method symbol to a list of its specialized overloads in the same class. */ private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil @@ -1008,27 +1008,25 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE) typeEnv(om) = env addConcreteSpecMethod(overriding) - info(om) = ( - if (overriding.isDeferred) { // abstract override - debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName) - Forward(overriding) - } - else { - // if the override is a normalized member, 'om' gets the - // implementation from its original target, and adds the - // environment of the normalized member (that is, any - // specialized /method/ type parameter bindings) - val impl = info get overriding match { - case Some(NormalizedMember(target)) => - typeEnv(om) = env ++ typeEnv(overriding) - target - case _ => - overriding - } - info(overriding) = Forward(om setPos overriding.pos) - SpecialOverride(impl) + if (overriding.isDeferred) { // abstract override + debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName) + info(om) = Forward(overriding) + } + else { + // if the override is a normalized member, 'om' gets the + // implementation from its original target, and adds the + // environment of the normalized member (that is, any + // 
specialized /method/ type parameter bindings) + info get overriding match { + case Some(NormalizedMember(target)) => + typeEnv(om) = env ++ typeEnv(overriding) + info(om) = Forward(target) + case _ => + info(om) = SpecialOverride(overriding) } - ) + info(overriding) = Forward(om setPos overriding.pos) + } + newOverload(overriding, om, env) ifDebug(exitingSpecialize(assert( overridden.owner.info.decl(om.name) != NoSymbol, @@ -1156,7 +1154,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * If it is a 'no-specialization' run, it is applied only to loaded symbols. */ override def transformInfo(sym: Symbol, tpe: Type): Type = { - if (settings.nospecialization.value && currentRun.compiles(sym)) tpe + if (settings.nospecialization && currentRun.compiles(sym)) tpe else tpe.resultType match { case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) => val tparams = tpe.typeParams @@ -1752,21 +1750,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Create specialized class definitions */ def implSpecClasses(trees: List[Tree]): List[Tree] = { - val buf = new mutable.ListBuffer[Tree] - for (tree <- trees) - tree match { - case ClassDef(_, _, _, impl) => - tree.symbol.info // force specialization - for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) { - val parents = specCls.info.parents.map(TypeTree) - buf += - ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List())) - .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos - debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env)) - } - case _ => - } - buf.toList + trees flatMap { + case tree @ ClassDef(_, _, _, impl) => + tree.symbol.info // force specialization + for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield { + debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env)) + val parents = 
specCls.info.parents.map(TypeTree) + ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List())) + .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos + } + case _ => Nil + } sortBy (_.name.decoded) } } @@ -1836,7 +1830,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { class SpecializationTransformer(unit: CompilationUnit) extends Transformer { informProgress("specializing " + unit) override def transform(tree: Tree) = { - val resultTree = if (settings.nospecialization.value) tree + val resultTree = if (settings.nospecialization) tree else exitingSpecialize(specializeCalls(unit).transform(tree)) // Remove the final modifier and @inline annotation from anything in the @@ -1855,6 +1849,5 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } resultTree - } - } + } } } diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 92ed7fc555..313f968e93 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -217,7 +217,10 @@ abstract class TailCalls extends Transform { debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim) accessed += ctx.label - typedPos(fun.pos)(Apply(Ident(ctx.label), noTailTransform(recv) :: transformArgs)) + typedPos(fun.pos) { + val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe)) + Apply(Ident(ctx.label), noTailTransform(recv) :: args) + } } if (!ctx.isEligible) fail("it is neither private nor final so can be overridden") @@ -276,7 +279,7 @@ abstract class TailCalls extends Transform { typedPos(tree.pos)(Block( List(ValDef(newThis, This(currentClass))), - LabelDef(newCtx.label, newThis :: vpSyms, newRHS) + LabelDef(newCtx.label, newThis :: vpSyms, mkAttributedCastHack(newRHS, newCtx.label.tpe.resultType)) )) } else { @@ -373,6 +376,13 @@ 
abstract class TailCalls extends Transform { super.transform(tree) } } + + // Workaround for SI-6900. Uncurry installs an InfoTransformer and a tree Transformer. + // These leave us with conflicting view on method signatures; the parameter symbols in + // the MethodType can be clones of the ones originally found on the parameter ValDef, and + // consequently appearing in the typechecked RHS of the method. + private def mkAttributedCastHack(tree: Tree, tpe: Type) = + gen.mkAttributedCast(tree, tpe) } // collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 8fd1df7cea..2f5cb23abb 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -347,10 +347,14 @@ abstract class UnCurry extends InfoTransform } else { log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}") + def canUseDirectly(recv: Tree) = ( + recv.tpe.typeSymbol.isSubClass(FunctionClass(0)) + && treeInfo.isExprSafeToInline(recv) + ) arg match { // don't add a thunk for by-name argument if argument already is an application of // a Function0. We can then remove the application and use the existing Function0. - case Apply(Select(recv, nme.apply), Nil) if recv.tpe.typeSymbol isSubClass FunctionClass(0) => + case Apply(Select(recv, nme.apply), Nil) if canUseDirectly(recv) => recv case _ => newFunction0(arg) @@ -424,7 +428,7 @@ abstract class UnCurry extends InfoTransform val result = ( // TODO - settings.noassertions.value temporarily retained to avoid // breakage until a reasonable interface is settled upon. 
- if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value))) + if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions))) replaceElidableTree(tree) else translateSynchronized(tree) match { case dd @ DefDef(mods, name, tparams, _, tpt, rhs) => @@ -628,7 +632,8 @@ abstract class UnCurry extends InfoTransform * * This transformation erases the dependent method types by: * - Widening the formal parameter type to existentially abstract - * over the prior parameters (using `packSymbols`) + * over the prior parameters (using `packSymbols`). This transformation + * is performed in the `InfoTransform`er [[scala.reflect.internal.transform.UnCurry]]. * - Inserting casts in the method body to cast to the original, * precise type. * @@ -656,15 +661,14 @@ abstract class UnCurry extends InfoTransform */ def erase(dd: DefDef): (List[List[ValDef]], Tree) = { import dd.{ vparamss, rhs } - val vparamSyms = vparamss flatMap (_ map (_.symbol)) - val paramTransforms: List[ParamTransform] = - vparamss.flatten.map { p => - val declaredType = p.symbol.info - // existentially abstract over value parameters - val packedType = typer.packSymbols(vparamSyms, declaredType) - if (packedType =:= declaredType) Identity(p) + map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) => + val packedType = infoParam.info + if (packedType =:= p.symbol.info) Identity(p) else { + // The Uncurry info transformer existentially abstracted over value parameters + // from the previous parameter lists. + // Change the type of the param symbol p.symbol updateInfo packedType @@ -676,8 +680,8 @@ abstract class UnCurry extends InfoTransform // the method body to refer to this, rather than the parameter. 
val tempVal: ValDef = { val tempValName = unit freshTermName (p.name + "$") - val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType) - atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType))) + val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info) + atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info))) } Packed(newParam, tempVal) } @@ -695,13 +699,6 @@ abstract class UnCurry extends InfoTransform Block(tempVals, rhsSubstituted) } - // update the type of the method after uncurry. - dd.symbol updateInfo { - val GenPolyType(tparams, tp) = dd.symbol.info - logResult(s"erased dependent param types for ${dd.symbol.info}") { - GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType)) - } - } (allParams :: Nil, rhs1) } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 0371df3b10..92b7700c04 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -573,6 +573,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { assert(tp.isInstanceOf[SingletonType]) val toString = tp match { case ConstantType(c) => c.escapedStringValue + case _ if tp.typeSymbol.isModuleClass => tp.typeSymbol.name.toString case _ => tp.toString } Const.unique(tp, new ValueConst(tp, tp.widen, toString)) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index 125e9a3b65..31b04d0bd6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -15,9 +15,9 @@ import scala.reflect.internal.util.Position /** Optimize and analyze matches based on their TreeMaker-representation. 
* * The patmat translation doesn't rely on this, so it could be disabled in principle. - * - * TODO: split out match analysis + * - well, not quite: the backend crashes if we emit duplicates in switches (e.g. SI-7290) */ +// TODO: split out match analysis trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { import global._ import global.definitions._ @@ -435,7 +435,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) => Some(CaseDef(pattern, guard, body)) // alternatives - case AlternativesTreeMaker(_, altss, _) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported => + case AlternativesTreeMaker(_, altss, pos) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported => val switchableAlts = altss map { case SwitchableTreeMaker(pattern) :: Nil => Some(pattern) @@ -445,7 +445,17 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // succeed if they were all switchable sequence(switchableAlts) map { switchableAlts => - CaseDef(Alternative(switchableAlts), guard, body) + def extractConst(t: Tree) = t match { + case Literal(const) => const + case _ => t + } + // SI-7290 Discard duplicate alternatives that would crash the backend + val distinctAlts = distinctBy(switchableAlts)(extractConst) + if (distinctAlts.size < switchableAlts.size) { + val duplicated = switchableAlts.groupBy(extractConst).flatMap(_._2.drop(1).take(1)) // report the first duplicated + global.currentUnit.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}") + } + CaseDef(Alternative(distinctAlts), guard, body) } case _ => // debug.patmat("can't emit switch for "+ makers) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 76268f3ecd..4dff445fe8 100644 --- 
a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -522,7 +522,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}"))) val (suppression, requireSwitch): (Suppression, Boolean) = - if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false) + if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false) else scrut match { case Typed(tree, tpt) => val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 62c584e97b..567d5d0ecd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -66,9 +66,9 @@ trait Adaptations { ) } - if (settings.noAdaptedArgs.value) + if (settings.noAdaptedArgs) adaptWarning("No automatic adaptation here: use explicit parentheses.") - else if (settings.warnAdaptedArgs.value) + else if (settings.warnAdaptedArgs) adaptWarning( if (args.isEmpty) "Adapting argument list by inserting (): " + ( if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous." @@ -77,7 +77,7 @@ trait Adaptations { else "Adapting argument list by creating a " + args.size + "-tuple: this may not be what you want." 
) - !settings.noAdaptedArgs.value + !settings.noAdaptedArgs } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 36121f2653..02e1eb6f00 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -97,9 +97,9 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body) + if (global.settings.Yrangepos && !global.reporter.hasErrors) global.validatePositions(unit.body) for (workItem <- unit.toCheck) workItem() - if (settings.lint.value) + if (settings.lint) typer checkUnused unit } finally { diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 026f5f7bc8..0686b28079 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -203,7 +203,7 @@ trait Checkable { private def isEffectivelyFinal(sym: Symbol): Boolean = ( // initialization important sym.initialize.isEffectivelyFinal || ( - settings.future.value && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final. + settings.future && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final. 
) ) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 85c44a7ec4..89fc55bc2c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -919,7 +919,7 @@ trait ContextErrors { def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type], tparams: List[Symbol], kindErrors: List[String]) = issueNormalTypeError(tree, - NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value)) + NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes)) //substExpr def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) = @@ -1361,7 +1361,7 @@ trait ContextErrors { } def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) = - compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value)) + compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes)) def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) = compatibilityError( diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 9d5a9c819c..f135f7f6ae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -79,9 +79,12 @@ trait Contexts { self: Analyzer => protected def rootImports(unit: CompilationUnit): List[Symbol] = { assert(definitions.isDefinitionsInitialized, "definitions uninitialized") - if (settings.noimports.value) Nil + if (settings.noimports) Nil else if (unit.isJava) RootImports.javaList - else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList + else if 
(settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) { + debuglog("Omitted import of Predef._ for " + unit) + RootImports.javaAndScalaList + } else RootImports.completeList } @@ -322,7 +325,7 @@ trait Contexts { self: Analyzer => def makeNewImport(imp: Import): Context = { val impInfo = new ImportInfo(imp, depth) - if (settings.lint.value && imp.pos.isDefined) // pos.isDefined excludes java.lang/scala/Predef imports + if (settings.lint && imp.pos.isDefined) // pos.isDefined excludes java.lang/scala/Predef imports allImportInfos(unit) ::= impInfo make(unit, imp, owner, scope, impInfo :: imports) @@ -406,7 +409,7 @@ trait Contexts { self: Analyzer => unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg) @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) { - if (settings.Yissuedebug.value) { + if (settings.Yissuedebug) { log("issue error: " + err.errMsg) (new Exception).printStackTrace() } @@ -912,7 +915,25 @@ trait Contexts { self: Analyzer => def lookupImport(imp: ImportInfo, requireExplicit: Boolean) = importedAccessibleSymbol(imp, name, requireExplicit) filter qualifies - while (!impSym.exists && imports.nonEmpty && imp1.depth > symbolDepth) { + // Java: A single-type-import declaration d in a compilation unit c of package p + // that imports a type named n shadows, throughout c, the declarations of: + // + // 1) any top level type named n declared in another compilation unit of p + // + // A type-import-on-demand declaration never causes any other declaration to be shadowed. + // + // Scala: Bindings of different kinds have a precedence defined on them: + // + // 1) Definitions and declarations that are local, inherited, or made available by a + // package clause in the same compilation unit where the definition occurs have + // highest precedence. + // 2) Explicit imports have next highest precedence. 
+ def depthOk(imp: ImportInfo) = ( + imp.depth > symbolDepth + || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth) + ) + + while (!impSym.exists && imports.nonEmpty && depthOk(imports.head)) { impSym = lookupImport(imp1, requireExplicit = false) if (!impSym.exists) imports = imports.tail @@ -1047,7 +1068,7 @@ trait Contexts { self: Analyzer => if (result == NoSymbol) selectors = selectors.tail } - if (settings.lint.value && selectors.nonEmpty && result != NoSymbol && pos != NoPosition) + if (settings.lint && selectors.nonEmpty && result != NoSymbol && pos != NoPosition) recordUsage(current, result) // Harden against the fallout from bugs like SI-6745 diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index 4075aa26f7..7092f00bff 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -115,7 +115,7 @@ trait EtaExpansion { self: Analyzer => val origTpe = sym.tpe val isRepeated = definitions.isRepeatedParamType(origTpe) // SI-4176 Don't leak A* in eta-expanded function types. 
See t4176b.scala - val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropIllegalStarTypes(origTpe) + val droppedStarTpe = if (settings.etaExpandKeepsStar) origTpe else dropIllegalStarTypes(origTpe) val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree) (valDef, isRepeated) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 8f52088b19..85e31347be 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -298,7 +298,7 @@ trait Implicits { def pos = if (pos0 != NoPosition) pos0 else tree.pos def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = { - if (settings.XlogImplicits.value) + if (settings.XlogImplicits) reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason) SearchFailure } @@ -1118,7 +1118,7 @@ trait Implicits { ) // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) - if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) + if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) if (context.macrosEnabled) success(materializer) // don't call `failure` here. 
if macros are disabled, we just fail silently // otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled" @@ -1141,7 +1141,7 @@ trait Implicits { if (args contains EmptyTree) EmptyTree else typedPos(tree.pos.focus) { val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList) - if (settings.debug.value) println("generated manifest: "+mani) // DEBUG + if (settings.debug) println("generated manifest: "+mani) // DEBUG mani } @@ -1316,7 +1316,7 @@ trait Implicits { } } - if (result.isFailure && settings.debug.value) + if (result.isFailure && settings.debug) log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType) result diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 495ac1c086..8d7830897d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -131,7 +131,7 @@ trait Infer extends Checkable { else if (optionArgs.nonEmpty) if (nbSubPats == 1) { val productArity = productArgs.size - if (productArity > 1 && settings.lint.value) + if (productArity > 1 && settings.lint) global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}") optionArgs } @@ -338,7 +338,7 @@ trait Infer extends Checkable { sym1 = sym if (sym1 == NoSymbol) { - if (settings.debug.value) { + if (settings.debug) { Console.println(context) Console.println(tree) Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType)) @@ -378,7 +378,7 @@ trait Infer extends Checkable { try pre.memberType(sym1) catch { case ex: MalformedType => - if (settings.debug.value) ex.printStackTrace + if (settings.debug) ex.printStackTrace val sym2 = underlyingSymbol(sym1) val itype = pre.memberType(sym2) ErrorUtils.issueTypeError( 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 07c1e732d3..de3010c371 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -530,7 +530,13 @@ trait Namers extends MethodSynthesis { // Setting the position at the import means that if there is // more than one hidden name, the second will not be warned. // So it is the position of the actual hidden name. - checkNotRedundant(tree.pos withPoint fromPos, from, to) + // + // Note: java imports have precence over definitions in the same package + // so don't warn for them. There is a corresponding special treatment + // in the shadowing rules in typedIdent to (SI-7232). In any case, + // we shouldn't be emitting warnings for .java source files. + if (!context.unit.isJava) + checkNotRedundant(tree.pos withPoint fromPos, from, to) } } @@ -666,7 +672,7 @@ trait Namers extends MethodSynthesis { m.updateAttachment(new ConstructorDefaultsAttachment(tree, null)) } val owner = tree.symbol.owner - if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) { + if (settings.lint && owner.isPackageObjectClass && !mods.isImplicit) { context.unit.warning(tree.pos, "it is not recommended to define classes/objects inside of package objects.\n" + "If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead." 
@@ -702,7 +708,7 @@ trait Namers extends MethodSynthesis { // check that lower bound is not an F-bound // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed for (tp1 @ TypeRef(_, sym, _) <- lo) { - if (settings.breakCycles.value) { + if (settings.breakCycles) { if (!sym.maybeInitialize) { log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}") return sym diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index d5ecb687b0..6921f8ce27 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -499,7 +499,7 @@ trait NamesDefaults { self: Analyzer => // disable conforms as a view... val errsBefore = reporter.ERROR.count try typer.silent { tpr => - val res = tpr.typed(arg, subst(paramtpe)) + val res = tpr.typed(arg.duplicate, subst(paramtpe)) // better warning for SI-5044: if `silent` was not actually silent give a hint to the user // [H]: the reason why `silent` is not silent is because the cyclic reference exception is // thrown in a context completely different from `context` here. The exception happens while diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 40428272cf..efd4fd804f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -142,7 +142,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } // This has become noisy with implicit classes. 
- if (settings.lint.value && settings.developer.value) { + if (settings.lint && settings.developer) { clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym => val alts = clazz.info.decl(sym.name).alternatives if (alts.size > 1) @@ -307,7 +307,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans infoStringWithLocation(other), infoStringWithLocation(member) ) - else if (settings.debug.value) + else if (settings.debug) analyzer.foundReqMsg(member.tpe, other.tpe) else "" @@ -407,7 +407,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) { // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here. // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches. - if (!settings.overrideVars.value) + if (!settings.overrideVars) overrideError("cannot override a mutable variable") } else if (member.isAnyOverride && @@ -431,7 +431,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } else { checkOverrideTypes() checkOverrideDeprecated() - if (settings.warnNullaryOverride.value) { + if (settings.warnNullaryOverride) { if (other.paramss.isEmpty && !member.paramss.isEmpty) { unit.warning(member.pos, "non-nullary method overrides nullary method") } @@ -920,7 +920,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def apply(tp: Type) = mapOver(tp).normalize } - def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match { + def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint) (fn, args) match { case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply => unit.warning(pos, 
s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567 case _ => @@ -1199,7 +1199,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans catch { case ex: TypeError => unit.error(tree0.pos, ex.getMessage()) - if (settings.explaintypes.value) { + if (settings.explaintypes) { val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds) (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ)) (argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi)) @@ -1537,9 +1537,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) => checkDeprecatedOvers(tree) checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef]) - if (settings.warnNullaryUnit.value) + if (settings.warnNullaryUnit) checkNullaryMethodReturnType(sym) - if (settings.warnInaccessible.value) { + if (settings.warnInaccessible) { if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic) checkAccessibilityOfReferencedTypes(tree) } @@ -1642,7 +1642,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans result } catch { case ex: TypeError => - if (settings.debug.value) ex.printStackTrace() + if (settings.debug) ex.printStackTrace() unit.error(tree.pos, ex.getMessage()) tree } finally { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index fb692a1954..e22dc73b53 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -91,7 +91,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (!found.isErroneous && !req.isErroneous) { val msg = analyzer.ErrorUtils.typeErrorMsg(found, req, typer.infer.isPossiblyMissingArgs(found, req)) typer.context.error(pos, 
analyzer.withAddendum(pos)(msg)) - if (settings.explaintypes.value) + if (settings.explaintypes) explainTypes(found, req) } } @@ -241,7 +241,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT // also exists in a superclass, because they may be surprised // to find out that a constructor parameter will shadow a // field. See SI-4762. - if (settings.lint.value) { + if (settings.lint) { if (sym.isPrivateLocal && sym.paramss.isEmpty) { qual.symbol.ancestors foreach { parent => parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 => @@ -256,9 +256,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT } } - // direct calls to aliases of param accessors to the superclass in order to avoid + + def isAccessibleFromSuper(sym: Symbol) = { + val pre = SuperType(sym.owner.tpe, qual.tpe) + localTyper.context.isAccessible(sym, pre, superAccess = true) + } + + // Direct calls to aliases of param accessors to the superclass in order to avoid // duplicating fields. - if (sym.isParamAccessor && sym.alias != NoSymbol) { + // ... but, only if accessible (SI-6793) + if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) { val result = (localTyper.typedPos(tree.pos) { Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias) }).asInstanceOf[Select] @@ -296,7 +303,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT case Super(_, mix) => if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) { - if (!settings.overrideVars.value) + if (!settings.overrideVars) unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf) } else if (isDisallowed(sym)) { unit.error(tree.pos, "super not allowed here: use this." 
+ name.decode + " instead") diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 5dc422bc1a..c531caa2e8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -93,7 +93,7 @@ trait SyntheticMethods extends ast.TreeDSL { // like Tags and Arrays which are not robust and infer things // which they shouldn't. val accessorLub = ( - if (settings.Xexperimental.value) { + if (settings.Xexperimental) { global.weakLub(accessors map (_.tpe.finalResultType))._1 match { case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) case tp => tp @@ -336,7 +336,7 @@ trait SyntheticMethods extends ast.TreeDSL { def shouldGenerate(m: Symbol) = { !hasOverridingImplementation(m) || { clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && { - if (settings.lint.value) { + if (settings.lint) { (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m => currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics") } diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index dc11c5fe96..1c8d37ef39 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -69,7 +69,7 @@ abstract class TreeCheckers extends Analyzer { // new symbols if (newSyms.nonEmpty) { informFn(newSyms.size + " new symbols.") - val toPrint = if (settings.debug.value) sortedNewSyms mkString " " else "" + val toPrint = if (settings.debug) sortedNewSyms mkString " " else "" newSyms.clear() if (toPrint != "") @@ -120,7 +120,7 @@ abstract class TreeCheckers extends Analyzer { def errorFn(msg: Any): 
Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))} def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg) def informFn(msg: Any) { - if (settings.verbose.value || settings.debug.value) + if (settings.verbose || settings.debug) println("[check: %s] %s".format(phase.prev, msg)) } @@ -137,7 +137,7 @@ abstract class TreeCheckers extends Analyzer { } def checkTrees() { - if (settings.verbose.value) + if (settings.verbose) Console.println("[consistency check at the beginning of phase " + phase + "]") currentRun.units foreach (x => wrap(x)(check(x))) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 46740cd03c..5f45fead91 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -545,7 +545,7 @@ trait TypeDiagnostics { // Error suppression will squash some of these warnings unless we circumvent it. // It is presumed if you are using a -Y option you would really like to hear // the warnings you've requested. - if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK) + if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK) context.warning(tree.pos, "dead code following this construct", force = true) tree } @@ -585,7 +585,7 @@ trait TypeDiagnostics { // but it seems that throwErrors excludes some of the errors that should actually be // buffered, causing TypeErrors to fly around again. This needs some more investigation. 
if (!context0.reportErrors) throw ex - if (settings.debug.value) ex.printStackTrace() + if (settings.debug) ex.printStackTrace() ex match { case CyclicReference(sym, info: TypeCompleter) => diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala index 65a3fedbd2..eb05486dca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala @@ -50,7 +50,7 @@ trait StructuredTypeStrings extends DestructureTypes { else block(level, grouping)(name, nodes) } private def shortClass(x: Any) = { - if (settings.debug.value) { + if (settings.debug) { val name = (x.getClass.getName split '.').last val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 6719411700..a7b68ee6f8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1141,14 +1141,14 @@ trait Typers extends Adaptations with Tags { // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially // infinite expansion if pt is constant type () if (sym == UnitClass) { // (12) - if (settings.warnValueDiscard.value) + if (settings.warnValueDiscard) context.unit.warning(tree.pos, "discarded non-Unit value") return typedPos(tree.pos, mode, pt) { Block(List(tree), Literal(Constant(()))) } } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) { - if (settings.warnNumericWiden.value) + if (settings.warnNumericWiden) context.unit.warning(tree.pos, "implicit numeric widening") return typedPos(tree.pos, mode, pt) { Select(tree, "to" + sym.name) @@ -1167,7 +1167,7 @@ trait Typers extends Adaptations with Tags { val coercion = inferView(tree, tree.tpe, pt, reportAmbiguous = true) if (coercion != EmptyTree) { def msg = "inferred 
view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe - if (settings.logImplicitConv.value) + if (settings.logImplicitConv) unit.echo(tree.pos, msg) debuglog(msg) @@ -1178,9 +1178,9 @@ trait Typers extends Adaptations with Tags { } } } - if (settings.debug.value) { + if (settings.debug) { log("error tree = " + tree) - if (settings.explaintypes.value) explainTypes(tree.tpe, pt) + if (settings.explaintypes) explainTypes(tree.tpe, pt) } val found = tree.tpe @@ -1294,7 +1294,7 @@ trait Typers extends Adaptations with Tags { inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match { case EmptyTree => qual case coercion => - if (settings.logImplicitConv.value) + if (settings.logImplicitConv) unit.echo(qual.pos, "applied implicit conversion from %s to %s = %s".format( qual.tpe, searchTemplate, coercion.symbol.defString)) @@ -1332,8 +1332,7 @@ trait Typers extends Adaptations with Tags { def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { def onError(reportError: => Tree): Tree = context.tree match { case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => - ( silent (_.typedArgs(args, mode)) - map (_.asInstanceOf[List[Tree]]) + ( silent (_.typedArgs(args.map(_.duplicate), mode)) filter (xs => !(xs exists (_.isErrorTyped))) map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors)) orElse ( _ => reportError) @@ -1755,12 +1754,12 @@ trait Typers extends Adaptations with Tags { if (!(selfType <:< parent.tpe.typeOfThis) && !phase.erasedTypes && !context.owner.isSynthetic && // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE) - !settings.noSelfCheck.value && // setting to suppress this very check + !settings.noSelfCheck && // setting to suppress this very check !selfType.isErroneous && !parent.tpe.isErroneous) { pending += ParentSelfTypeConformanceError(parent, selfType) - if 
(settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis) + if (settings.explaintypes) explainTypes(selfType, parent.tpe.typeOfThis) } if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError)) @@ -1859,6 +1858,9 @@ trait Typers extends Adaptations with Tags { } val impl2 = finishMethodSynthesis(impl1, clazz, context) + if (mdef.symbol == PredefModule) + ensurePredefParentsAreInSameSourceFile(impl2) + // SI-5954. On second compile of a companion class contained in a package object we end up // with some confusion of names which leads to having two symbols with the same name in the // same owner. Until that can be straightened out we will warn on companion objects in package @@ -1887,6 +1889,12 @@ trait Typers extends Adaptations with Tags { treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType } + + private def ensurePredefParentsAreInSameSourceFile(template: Template) = { + val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass) + if (parentSyms exists (_.associatedFile != PredefModule.associatedFile)) + unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.") + } /** In order to override this in the TreeCheckers Typer so synthetics aren't re-added * all the time, it is exposed here the module/class typing methods go through it. 
* ...but it turns out it's also the ideal spot for namer/typer coordination for @@ -2310,7 +2318,7 @@ trait Typers extends Adaptations with Tags { tdef.symbol.annotations.map(_.completeInfo()) // @specialized should not be pickled when compiling with -no-specialize - if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) { + if (settings.nospecialization && currentRun.compiles(tdef.symbol)) { tdef.symbol.removeAnnotation(definitions.SpecializedClass) tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass) } @@ -2502,7 +2510,7 @@ trait Typers extends Adaptations with Tags { // TODO: add fallback __match sentinel to predef val matchStrategy: Tree = - if (!(newPatternMatching && settings.Xexperimental.value && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen + if (!(newPatternMatching && settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) orElse (_ => null) if (matchStrategy ne null) // virtualize @@ -3568,7 +3576,7 @@ trait Typers extends Adaptations with Tags { // If there are dummy type arguments in typeFun part, it suggests we // must type the actual constructor call, not only the select. The value // arguments are how the type arguments will be inferred. - if (targs.isEmpty && typedFun0.exists(t => isDummyAppliedType(t.tpe))) + if (targs.isEmpty && typedFun0.exists(t => t.tpe != null && isDummyAppliedType(t.tpe))) logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _)))) else typedFun0 @@ -3681,77 +3689,9 @@ trait Typers extends Adaptations with Tags { }) } - def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type? 
- sym.isTypeParameter && sym.owner.isJavaDefined - - /** If we map a set of hidden symbols to their existential bounds, we - * have a problem: the bounds may themselves contain references to the - * hidden symbols. So this recursively calls existentialBound until - * the typeSymbol is not amongst the symbols being hidden. - */ - def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = { - def safeBound(t: Type): Type = - if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t - - def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match { - case tp @ RefinedType(parents, decls) => - val parents1 = parents mapConserve safeBound - if (parents eq parents1) tp - else copyRefinedType(tp, parents1, decls) - case tp => tp - } - - // Hanging onto lower bound in case anything interesting - // happens with it. - mapFrom(hidden)(s => s.existentialBound match { - case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s)) - case _ => hiBound(s) - }) - } - - /** Given a set `rawSyms` of term- and type-symbols, and a type - * `tp`, produce a set of fresh type parameters and a type so that - * it can be abstracted to an existential type. Every type symbol - * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of - * type `T` in `rawSyms` is given an associated type symbol of the - * following form: - * - * type x.type <: T with Singleton - * - * The name of the type parameter is `x.type`, to produce nice - * diagnostics. The Singleton parent ensures that the type - * parameter is still seen as a stable type. Type symbols in - * rawSyms are fully replaced by the new symbols. Term symbols are - * also replaced, except for term symbols of an Ident tree, where - * only the type of the Ident is changed. 
- */ - protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = { - val allBounds = existentialBoundsExcludingHidden(rawSyms) - val typeParams: List[Symbol] = rawSyms map { sym => - val name = sym.name match { - case x: TypeName => x - case x => tpnme.singletonName(x) - } - val bound = allBounds(sym) - val sowner = if (isRawParameter(sym)) context.owner else sym.owner - val quantified = sowner.newExistential(name, sym.pos) - - quantified setInfo bound.cloneInfo(quantified) - } - // Higher-kinded existentials are not yet supported, but this is - // tpeHK for when they are: "if a type constructor is expected/allowed, - // tpeHK must be called instead of tpe." - val typeParamTypes = typeParams map (_.tpeHK) - def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes) - - creator(typeParams map (_ modifyInfo doSubst), doSubst(tp)) - } - /** Compute an existential type from raw hidden symbols `syms` and type `tp` */ - def packSymbols(hidden: List[Symbol], tp: Type): Type = - if (hidden.isEmpty) tp - else existentialTransform(hidden, tp)(existentialAbstraction) + def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner)) def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = ctx.owner.isTerm && @@ -3870,8 +3810,16 @@ trait Typers extends Adaptations with Tags { if (vd.symbol.tpe.isVolatile) AbstractionFromVolatileTypeError(vd) val tpt1 = typedType(tree.tpt, mode) - existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => - TypeTree(newExistentialType(tparams, tp)) setOriginal tree + existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => { + val original = tpt1 match { + case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses)) + case _ => { + debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree") + tree + } + } + TypeTree(newExistentialType(tparams, 
tp)) setOriginal original + } ) } @@ -4095,7 +4043,7 @@ trait Typers extends Adaptations with Tags { if (ann.tpe == null) { // an annotated type val selfsym = - if (!settings.selfInAnnots.value) + if (!settings.selfInAnnots) NoSymbol else arg1.tpe.selfsym orElse { @@ -4382,6 +4330,12 @@ trait Typers extends Adaptations with Tags { treeCopy.New(tree, tpt1).setType(tp) } + def functionTypeWildcard(tree: Tree, arity: Int): Type = { + val tp = functionType(List.fill(arity)(WildcardType), WildcardType) + if (tp == NoType) MaxFunctionArityError(tree) + tp + } + def typedEta(expr1: Tree): Tree = expr1.tpe match { case TypeRef(_, ByNameParamClass, _) => val expr2 = Function(List(), expr1) setPos expr1.pos @@ -4393,10 +4347,10 @@ trait Typers extends Adaptations with Tags { typed1(expr2, mode, pt) case PolyType(_, MethodType(formals, _)) => if (isFunctionType(pt)) expr1 - else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType)) + else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length)) case MethodType(formals, _) => if (isFunctionType(pt)) expr1 - else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType)) + else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length)) case ErrorType => expr1 case _ => @@ -4668,7 +4622,7 @@ trait Typers extends Adaptations with Tags { if (isPastTyper) t.tpe match { case OverloadedType(pre, alts) => if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) () - else if (settings.debug.value) printCaller( + else if (settings.debug) printCaller( s"""|Select received overloaded type during $phase, but typer is over. |If this type reaches the backend, we are likely doomed to crash. 
|$t has these overloads: @@ -4702,11 +4656,13 @@ trait Typers extends Adaptations with Tags { def handleMissing: Tree = { def errorTree = missingSelectErrorTree(tree, qual, name) def asTypeSelection = ( - if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) { + if (context.unit.isJava && name.isTypeName) { + // SI-3120 Java uses the same syntax, A.B, to express selection from the + // value A and from the type A. We have to try both. atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match { case EmptyTree => None case tree1 => Some(typed1(tree1, mode, pt)) - } + } } else None ) @@ -4836,7 +4792,7 @@ trait Typers extends Adaptations with Tags { */ def typedIdent(tree: Tree, name: Name): Tree = { // setting to enable unqualified idents in empty package (used by the repl) - def inEmptyPackage = if (settings.exposeEmptyPackage.value) lookupInEmpty(name) else NoSymbol + def inEmptyPackage = if (settings.exposeEmptyPackage) lookupInEmpty(name) else NoSymbol def issue(err: AbsTypeError) = { // Avoiding some spurious error messages: see SI-2388. 
@@ -4957,7 +4913,7 @@ trait Typers extends Adaptations with Tags { AppliedTypeNoParametersError(tree, tpt1.tpe) } else { //Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}") - if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug + if (settings.debug) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams) } } @@ -5003,6 +4959,13 @@ trait Typers extends Adaptations with Tags { } def typedTry(tree: Try) = { + tree match { + case Try(_, Nil, EmptyTree) => + if (!isPastTyper) context.warning(tree.pos, + "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.") + case _ => + } + var block1 = typed(tree.block, pt) var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt) @@ -5153,7 +5116,7 @@ trait Typers extends Adaptations with Tags { def typedLiteral(tree: Literal) = { val value = tree.value // Warn about likely interpolated strings which are missing their interpolators - if (settings.lint.value) value match { + if (settings.lint) value match { case Constant(s: String) => def names = InterpolatorIdentRegex findAllIn s map (n => newTermName(n stripPrefix "$")) val shouldWarn = ( @@ -5335,7 +5298,7 @@ trait Typers extends Adaptations with Tags { reportTypeError(context, tree.pos, ex) setError(tree) case ex: Exception => - if (settings.debug.value) // @M causes cyclic reference error + if (settings.debug) // @M causes cyclic reference error Console.println("exception when typing "+tree+", pt = "+ptPlugins) if (context != null && context.unit.exists && tree != null) logError("AT: " + (tree.pos).dbgString, ex) diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index aa4128f1a7..7f9b81e1ec 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -281,11 +281,24 @@ class 
DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab private def traverse() = { val classBuf = immutable.Vector.newBuilder[ClassRep] val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath] - dir foreach { f => - if (!f.isDirectory && validClassFile(f.name)) - classBuf += ClassRep(Some(f), None) - else if (f.isDirectory && validPackage(f.name)) - packageBuf += new DirectoryClassPath(f, context) + dir foreach { + f => + // Optimization: We assume the file was not changed since `dir` called + // `Path.apply` and categorized existent files as `Directory` + // or `File`. + val isDirectory = f match { + case pf: io.PlainFile => pf.givenPath match { + case _: io.Directory => true + case _: io.File => false + case _ => f.isDirectory + } + case _ => + f.isDirectory + } + if (!isDirectory && validClassFile(f.name)) + classBuf += ClassRep(Some(f), None) + else if (isDirectory && validPackage(f.name)) + packageBuf += new DirectoryClassPath(f, context) } (packageBuf.result(), classBuf.result()) } diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index 6997dbd402..4e1cf02a6e 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -10,7 +10,7 @@ import java.io.PrintStream * @param enabled: A condition that must be true for trace info to be produced. 
*/ class SimpleTracer(out: PrintStream, enabled: Boolean = true) { - def apply[T](msg: String)(value: T): T = { + def apply[T](msg: => String)(value: T): T = { if (enabled) out.println(msg+value) value } diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala index 3cdbcc5110..d2e9238e8f 100644 --- a/src/compiler/scala/tools/nsc/util/TreeSet.scala +++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala @@ -40,12 +40,22 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] { tree = add(tree) } - def iterator = { - def elems(t: Tree): Iterator[T] = { - if (t eq null) Iterator.empty - else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r) + def iterator = toList.iterator + + override def foreach[U](f: T => U) { + def loop(t: Tree) { + if (t ne null) { + loop(t.l) + f(t.elem) + loop(t.r) + } } - elems(tree) + loop(tree) + } + override def toList = { + val xs = scala.collection.mutable.ListBuffer[T]() + foreach(xs += _) + xs.toList } override def toString(): String = { diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 3e227ce8aa..f6ed5f8f1c 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -250,7 +250,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => throwIfErrors() val className = mdef.symbol.fullName - if (settings.debug.value) println("generated: "+className) + if (settings.debug) println("generated: "+className) def moduleFileName(className: String) = className + "$" val jclazz = jClass.forName(moduleFileName(className), true, classLoader) val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get @@ -349,11 +349,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => lazy val exporter = importer.reverse def typeCheck(tree: u.Tree, expectedType: u.Type, 
silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches { - if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType) + if (compiler.settings.verbose) println("importing "+tree+", expectedType = "+expectedType) val ctree: compiler.Tree = importer.importTree(tree) val cexpectedType: compiler.Type = importer.importType(expectedType) - if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType) + if (compiler.settings.verbose) println("typing "+ctree+", expectedType = "+expectedType) val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled) val uttree = exporter.importTree(ttree) uttree @@ -369,12 +369,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => } private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches { - if (compiler.settings.verbose.value) println(s"importing pt=$pt, tree=$tree, pos=$pos") + if (compiler.settings.verbose) println(s"importing pt=$pt, tree=$tree, pos=$pos") val ctree: compiler.Tree = importer.importTree(tree) val cpt: compiler.Type = importer.importType(pt) val cpos: compiler.Position = importer.importPosition(pos) - if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled)) + if (compiler.settings.verbose) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled)) val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos) val uitree = exporter.importTree(itree) uitree @@ 
-395,17 +395,17 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => } def parse(code: String): u.Tree = { - if (compiler.settings.verbose.value) println("parsing "+code) + if (compiler.settings.verbose) println("parsing "+code) val ctree: compiler.Tree = compiler.parse(code) val utree = exporter.importTree(ctree) utree } def compile(tree: u.Tree): () => Any = { - if (compiler.settings.verbose.value) println("importing "+tree) + if (compiler.settings.verbose) println("importing "+tree) val ctree: compiler.Tree = importer.importTree(tree) - if (compiler.settings.verbose.value) println("compiling "+ctree) + if (compiler.settings.verbose) println("compiling "+ctree) compiler.compile(ctree) } diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index d8e545e6b1..8d65c40a01 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -146,7 +146,7 @@ object PathResolver { import PathResolver.{ Defaults, Environment, ppcp } class PathResolver(settings: Settings, context: JavaContext) { - def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext) + def this(settings: Settings) = this(settings, if (settings.inline) new JavaContext else DefaultJavaContext) private def cmdLineOrElse(name: String, alt: String) = { (commandLineFor(name) match { @@ -240,7 +240,7 @@ class PathResolver(settings: Settings, context: JavaContext) { lazy val result = { val cp = new JavaClassPath(containers.toIndexedSeq, context) - if (settings.Ylogcp.value) { + if (settings.Ylogcp) { Console.println("Classpath built from " + settings.toConciseString) Console.println("Defaults: " + PathResolver.Defaults) Console.println("Calculated: " + Calculated) diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 099a882f10..dbdb2d02b6 
100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -547,7 +547,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) { try { if (!unit.isUpToDate) - if (unit.problems.isEmpty || !settings.YpresentationStrict.value) + if (unit.problems.isEmpty || !settings.YpresentationStrict) typeCheck(unit) else debugLog("%s has syntax errors. Skipped typechecking".format(unit)) else debugLog("already up to date: "+unit) diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala index 3b4a36f62d..c838606f02 100644 --- a/src/interactive/scala/tools/nsc/interactive/Main.scala +++ b/src/interactive/scala/tools/nsc/interactive/Main.scala @@ -12,7 +12,7 @@ package interactive */ object Main extends nsc.MainClass { override def processSettingsHook(): Boolean = { - if (this.settings.Yidedebug.value) { + if (this.settings.Yidedebug) { this.settings.Xprintpos.value = true this.settings.Yrangepos.value = true val compiler = new interactive.Global(this.settings, this.reporter) diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala index 04c06b9357..432400ecd2 100644 --- a/src/interactive/scala/tools/nsc/interactive/REPL.scala +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -32,7 +32,7 @@ object REPL { val settings = new Settings(replError) reporter = new ConsoleReporter(settings) val command = new CompilerCommand(args.toList, settings) - if (command.settings.version.value) + if (command.settings.version) reporter.echo(versionMsg) else { try { @@ -50,7 +50,7 @@ object REPL { } } catch { case ex @ FatalError(msg) => - if (true || command.settings.debug.value) // !!! + if (true || command.settings.debug) // !!! 
ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala index 9382d5890f..8a47c1df37 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala @@ -17,7 +17,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { val compiler = new Global(settings, reporter) def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) { - if (settings.verbose.value) print(msg+" "+arg+": ") + if (settings.verbose) print(msg+" "+arg+": ") val TIMEOUT = 10 // ms val limit = System.currentTimeMillis() + randomDelayMillis val res = new Response[U] @@ -28,7 +28,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { } else res.get(TIMEOUT) match { case Some(Left(t)) => /**/ - if (settings.verbose.value) println(t) + if (settings.verbose) println(t) case Some(Right(ex)) => ex.printStackTrace() println(ex) diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala deleted file mode 100644 index 535f1ac699..0000000000 --- a/src/library/scala/LowPriorityImplicits.scala +++ /dev/null @@ -1,95 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.collection.{ mutable, immutable, generic } -import mutable.WrappedArray -import immutable.WrappedString -import generic.CanBuildFrom -import scala.language.implicitConversions - -/** The `LowPriorityImplicits` class provides implicit values that - * are valid in all Scala compilation units without explicit qualification, - * but that are partially overridden by higher-priority conversions in object - * `Predef`. 
- * - * @author Martin Odersky - * @since 2.8 - */ -private[scala] abstract class LowPriorityImplicits { - /** We prefer the java.lang.* boxed types to these wrappers in - * any potential conflicts. Conflicts do exist because the wrappers - * need to implement ScalaNumber in order to have a symmetric equals - * method, but that implies implementing java.lang.Number as well. - * - * Note - these are inlined because they are value classes, but - * the call to xxxWrapper is not eliminated even though it does nothing. - * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ - * because maybe loading Predef has side effects! - */ - @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) - @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) - @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) - @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) - @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) - @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) - @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) - @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) - - // These eight implicits exist solely to exclude Null from the domain of - // the boxed types, so that e.g. "var x: Int = null" is a compile time - // error rather than a delayed null pointer exception by way of the - // conversion from java.lang.Integer. If defined in the same file as - // Integer2int, they would have higher priority because Null is a subtype - // of Integer. We balance that out and create conflict by moving the - // definition into the superclass. - // - // Caution: do not adjust tightrope tension without safety goggles in place. 
- implicit def Byte2byteNullConflict(x: Null): Byte = sys.error("value error") - implicit def Short2shortNullConflict(x: Null): Short = sys.error("value error") - implicit def Character2charNullConflict(x: Null): Char = sys.error("value error") - implicit def Integer2intNullConflict(x: Null): Int = sys.error("value error") - implicit def Long2longNullConflict(x: Null): Long = sys.error("value error") - implicit def Float2floatNullConflict(x: Null): Float = sys.error("value error") - implicit def Double2doubleNullConflict(x: Null): Double = sys.error("value error") - implicit def Boolean2booleanNullConflict(x: Null): Boolean = sys.error("value error") - - implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = - if (xs eq null) null - else WrappedArray.make(xs) - - // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] - // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 - // unique ones by way of this implicit, let's share one. - implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { - if (xs eq null) null - else if (xs.length == 0) WrappedArray.empty[T] - else new WrappedArray.ofRef[T](xs) - } - - implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null - implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null - implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null - implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null - implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null - implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null - implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = 
if (xs ne null) new WrappedArray.ofShort(xs) else null - implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null - implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null - - implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null - implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null - - implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = - new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { - def apply(from: String) = immutable.IndexedSeq.newBuilder[T] - def apply() = immutable.IndexedSeq.newBuilder[T] - } -} - diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 9a468489a2..569157de20 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -446,3 +446,88 @@ private[scala] trait DeprecatedPredef { @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf2(format: String) = ReadStdin.readf2(format) @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf3(format: String) = ReadStdin.readf3(format) } + +/** The `LowPriorityImplicits` class provides implicit values that +* are valid in all Scala compilation units without explicit qualification, +* but that are partially overridden by higher-priority conversions in object +* `Predef`. +* +* @author Martin Odersky +* @since 2.8 +*/ +// SI-7335 Parents of Predef are defined in the same compilation unit to avoid +// cyclic reference errors compiling the standard library *without* a previously +// compiled copy on the classpath. +private[scala] abstract class LowPriorityImplicits { + import mutable.WrappedArray + import immutable.WrappedString + + /** We prefer the java.lang.* boxed types to these wrappers in + * any potential conflicts. 
Conflicts do exist because the wrappers + * need to implement ScalaNumber in order to have a symmetric equals + * method, but that implies implementing java.lang.Number as well. + * + * Note - these are inlined because they are value classes, but + * the call to xxxWrapper is not eliminated even though it does nothing. + * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ + * because maybe loading Predef has side effects! + */ + @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) + @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) + @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) + @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) + @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) + @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) + @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) + @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) + + // These eight implicits exist solely to exclude Null from the domain of + // the boxed types, so that e.g. "var x: Int = null" is a compile time + // error rather than a delayed null pointer exception by way of the + // conversion from java.lang.Integer. If defined in the same template as + // Integer2int, they would have higher priority because Null is a subtype + // of Integer. We balance that out and create conflict by moving the + // definition into the superclass. + // + // Caution: do not adjust tightrope tension without safety goggles in place. 
+ implicit def Byte2byteNullConflict(x: Null): Byte = sys.error("value error") + implicit def Short2shortNullConflict(x: Null): Short = sys.error("value error") + implicit def Character2charNullConflict(x: Null): Char = sys.error("value error") + implicit def Integer2intNullConflict(x: Null): Int = sys.error("value error") + implicit def Long2longNullConflict(x: Null): Long = sys.error("value error") + implicit def Float2floatNullConflict(x: Null): Float = sys.error("value error") + implicit def Double2doubleNullConflict(x: Null): Double = sys.error("value error") + implicit def Boolean2booleanNullConflict(x: Null): Boolean = sys.error("value error") + + implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = + if (xs eq null) null + else WrappedArray.make(xs) + + // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] + // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 + // unique ones by way of this implicit, let's share one. + implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { + if (xs eq null) null + else if (xs.length == 0) WrappedArray.empty[T] + else new WrappedArray.ofRef[T](xs) + } + + implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null + implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null + implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null + implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null + implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null + implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null + implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = 
if (xs ne null) new WrappedArray.ofShort(xs) else null + implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null + implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null + + implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null + implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null + + implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = + new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { + def apply(from: String) = immutable.IndexedSeq.newBuilder[T] + def apply() = immutable.IndexedSeq.newBuilder[T] + } +} diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index d91f70da75..66fbad7643 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -41,12 +41,7 @@ package object parallel { private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString) - private[parallel] def getTaskSupport: TaskSupport = - if (scala.util.Properties.isJavaAtLeast("1.6")) { - val vendor = scala.util.Properties.javaVmVendor - if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport - else new ThreadPoolTaskSupport - } else new ThreadPoolTaskSupport + private[parallel] def getTaskSupport: TaskSupport = new ForkJoinTaskSupport val defaultTaskSupport: TaskSupport = getTaskSupport diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 10e6d7d9a4..97d7da3f2d 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -1,5 +1,7 @@ package scala +import java.lang.reflect.{ AccessibleObject => jAccessibleObject } + package object 
reflect { // in the new scheme of things ClassManifests are aliased to ClassTags @@ -42,6 +44,18 @@ package object reflect { def classTag[T](implicit ctag: ClassTag[T]) = ctag + /** Make a java reflection object accessible, if it is not already + * and it is possible to do so. If a SecurityException is thrown in the + * attempt, it is caught and discarded. + */ + def ensureAccessible[T <: jAccessibleObject](m: T): T = { + if (!m.isAccessible) { + try m setAccessible true + catch { case _: SecurityException => } // does nothing + } + m + } + // anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala // implementation is hardwired into `scala.reflect.reify.Taggers` // using the mechanism implemented in `scala.tools.reflect.FastTrack` diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 753dd0205e..ea1f392e2b 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -158,13 +158,7 @@ object ScalaRunTime { // Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957 // More background at ticket #2318. 
- def ensureAccessible(m: JMethod): JMethod = { - if (!m.isAccessible) { - try m setAccessible true - catch { case _: SecurityException => () } - } - m - } + def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) def checkInitialized[T <: AnyRef](x: T): T = if (x == null) throw new UninitializedError else x diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index 3f61062073..46e9621b31 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -6,7 +6,6 @@ package scala.tools.partest import scala.tools.nsc._ -import io.Directory import util.{BatchSourceFile, CommandLineParser} import reporters.{Reporter, ConsoleReporter} @@ -21,8 +20,8 @@ abstract class DirectTest extends App { def show(): Unit // the test file or dir, and output directory - def testPath = io.File(sys.props("partest.test-path")) - def testOutput = io.Directory(sys.props("partest.output")) + def testPath = SFile(sys.props("partest.test-path")) + def testOutput = Directory(sys.props("partest.output")) // override to add additional settings with strings def extraSettings: String = "" diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala index f5333cc5f9..b12ec0de61 100644 --- a/src/partest/scala/tools/partest/IcodeTest.scala +++ b/src/partest/scala/tools/partest/IcodeTest.scala @@ -5,9 +5,7 @@ package scala.tools.partest -import scala.tools.nsc._ -import nest.FileUtil._ -import io.Directory +import scala.tools.partest.nest.FileUtil.compareContents /** A trait for testing icode. 
All you need is this in a * partest source file: diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala index 5d98a8be81..16f1a6933f 100644 --- a/src/partest/scala/tools/partest/PartestDefaults.scala +++ b/src/partest/scala/tools/partest/PartestDefaults.scala @@ -1,13 +1,10 @@ package scala.tools package partest -import nsc.io.{ File, Path, Directory } -import scala.tools.util.PathResolver -import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty } -import java.lang.Runtime.getRuntime +import scala.tools.nsc.Properties.{ propOrElse, propOrNone, propOrEmpty } +import java.lang.Runtime.{ getRuntime => runtime } object PartestDefaults { - import nsc.Properties._ def testRootName = propOrNone("partest.root") def srcDirName = propOrElse("partest.srcdir", "files") @@ -23,7 +20,7 @@ object PartestDefaults { def testBuild = propOrNone("partest.build") def errorCount = propOrElse("partest.errors", "0").toInt - def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse getRuntime.availableProcessors + def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse runtime.availableProcessors def timeout = "1200000" } diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala index 13207b16fd..b5b09a753a 100644 --- a/src/partest/scala/tools/partest/PartestTask.scala +++ b/src/partest/scala/tools/partest/PartestTask.scala @@ -10,14 +10,10 @@ package scala.tools package partest import scala.util.Properties.setProp -import scala.tools.nsc.io.{ Directory, Path => SPath } -import nsc.util.ClassPath -import util.PathResolver import scala.tools.ant.sabbus.CompilationPathProperty -import java.io.File import java.lang.reflect.Method import org.apache.tools.ant.Task -import org.apache.tools.ant.types.{Path, Reference, FileSet} +import org.apache.tools.ant.types.{ Reference, FileSet} import org.apache.tools.ant.types.Commandline.Argument /** 
An Ant task to execute the Scala test suite (NSC). @@ -26,92 +22,42 @@ import org.apache.tools.ant.types.Commandline.Argument * - `srcdir`, * - `classpath`, * - `classpathref`, - * - `showlog`, - * - `showdiff`, * - `erroronfailed`, * - `javacmd`, * - `javaccmd`, * - `scalacopts`, - * - `timeout`, * - `debug`, * - `junitreportdir`. * * It also takes the following parameters as nested elements: * - `compilationpath`. - * - `postests`, - * - `negtests`, - * - `runtests`, - * - `jvmtests`, - * - `residenttests`, - * - `shootouttests`, - * - `scalaptests`, - * - `scalachecktests`, - * - `specializedtests`, - * - `instrumentedtests`, - * - `presentationtests`, - * - `scripttests`. * * @author Philippe Haller */ class PartestTask extends Task with CompilationPathProperty { - - def addConfiguredPosTests(input: FileSet) { - posFiles = Some(input) - } - - def addConfiguredNegTests(input: FileSet) { - negFiles = Some(input) - } - - def addConfiguredRunTests(input: FileSet) { - runFiles = Some(input) - } - - def addConfiguredJvmTests(input: FileSet) { - jvmFiles = Some(input) - } - - def addConfiguredResidentTests(input: FileSet) { - residentFiles = Some(input) - } - - def addConfiguredScalacheckTests(input: FileSet) { - scalacheckFiles = Some(input) - } - - def addConfiguredScriptTests(input: FileSet) { - scriptFiles = Some(input) - } - - def addConfiguredShootoutTests(input: FileSet) { - shootoutFiles = Some(input) - } - - def addConfiguredScalapTests(input: FileSet) { - scalapFiles = Some(input) - } - - def addConfiguredSpecializedTests(input: FileSet) { - specializedFiles = Some(input) - } - - def addConfiguredInstrumentedTests(input: FileSet) { - instrumentedFiles = Some(input) - } - - def addConfiguredPresentationTests(input: FileSet) { - presentationFiles = Some(input) - } - - def addConfiguredAntTests(input: FileSet) { - antFiles = Some(input) - } - + type Path = org.apache.tools.ant.types.Path + + private var kinds: List[String] = Nil + private var classpath: 
Option[Path] = None + private var debug = false + private var errorOnFailed: Boolean = true + private var jUnitReportDir: Option[File] = None + private var javaccmd: Option[File] = None + private var javacmd: Option[File] = Option(sys.props("java.home")) map (x => new File(x, "bin/java")) + private var scalacArgs: Option[Seq[Argument]] = None + private var srcDir: Option[String] = None + private var colors: Int = 0 def setSrcDir(input: String) { srcDir = Some(input) } + def setColors(input: String) { + try colors = input.toInt catch { case _: NumberFormatException => () } + if (colors > 0) + sys.props("partest.colors") = colors.toString + } + def setClasspath(input: Path) { if (classpath.isEmpty) classpath = Some(input) @@ -127,15 +73,6 @@ class PartestTask extends Task with CompilationPathProperty { def setClasspathref(input: Reference) { createClasspath().setRefid(input) } - - def setShowLog(input: Boolean) { - showLog = input - } - - def setShowDiff(input: Boolean) { - showDiff = input - } - def setErrorOnFailed(input: Boolean) { errorOnFailed = input } @@ -144,6 +81,10 @@ class PartestTask extends Task with CompilationPathProperty { javacmd = Some(input) } + def setKinds(input: String) { + kinds = words(input) + } + def setJavacCmd(input: File) { javaccmd = Some(input) } @@ -159,10 +100,6 @@ class PartestTask extends Task with CompilationPathProperty { a } - def setTimeout(delay: String) { - timeout = Some(delay) - } - def setDebug(input: Boolean) { debug = input } @@ -171,172 +108,35 @@ class PartestTask extends Task with CompilationPathProperty { jUnitReportDir = Some(input) } - private var classpath: Option[Path] = None - private var srcDir: Option[String] = None - private var javacmd: Option[File] = None - private var javaccmd: Option[File] = None - private var showDiff: Boolean = false - private var showLog: Boolean = false - private var posFiles: Option[FileSet] = None - private var negFiles: Option[FileSet] = None - private var runFiles: Option[FileSet] 
= None - private var jvmFiles: Option[FileSet] = None - private var residentFiles: Option[FileSet] = None - private var scalacheckFiles: Option[FileSet] = None - private var scriptFiles: Option[FileSet] = None - private var shootoutFiles: Option[FileSet] = None - private var scalapFiles: Option[FileSet] = None - private var specializedFiles: Option[FileSet] = None - private var instrumentedFiles: Option[FileSet] = None - private var presentationFiles: Option[FileSet] = None - private var antFiles: Option[FileSet] = None - private var errorOnFailed: Boolean = false - private var scalacArgs: Option[Seq[Argument]] = None - private var timeout: Option[String] = None - private var jUnitReportDir: Option[File] = None - private var debug = false - - def fileSetToDir(fs: FileSet) = Directory(fs getDir getProject) - def fileSetToArray(fs: FileSet): Array[SPath] = { - val root = fileSetToDir(fs) - (fs getDirectoryScanner getProject).getIncludedFiles map (root / _) - } - - private def getFiles(fileSet: Option[FileSet]): Array[File] = fileSet match { - case None => Array() - case Some(fs) => fileSetToArray(fs) filterNot (_ hasExtension "log") map (_.jfile) - } - - private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] = fileSet match { - case None => Array() - case Some(fs) => - def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".") - // println("----> " + fileSet) - - val fileTests = getFiles(Some(fs)) filterNot (x => shouldExclude(x.getName)) - val dirResult = getDirs(Some(fs)) filterNot (x => shouldExclude(x.getName)) - // println("dirs: " + dirResult.toList) - // println("files: " + fileTests.toList) - - dirResult ++ fileTests - } - - private def getDirs(fileSet: Option[FileSet]): Array[File] = fileSet match { - case None => Array() - case Some(fs) => - def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".") - - val dirTests: Iterator[SPath] = fileSetToDir(fs).dirs filterNot (x => shouldExclude(x.name)) - val 
dirResult = dirTests.toList.toArray map (_.jfile) - - dirResult - } - - - private def getPosFiles = getFilesAndDirs(posFiles) - private def getNegFiles = getFilesAndDirs(negFiles) - private def getRunFiles = getFilesAndDirs(runFiles) - private def getJvmFiles = getFilesAndDirs(jvmFiles) - private def getResidentFiles = getFiles(residentFiles) - private def getScalacheckFiles = getFilesAndDirs(scalacheckFiles) - private def getScriptFiles = getFiles(scriptFiles) - private def getShootoutFiles = getFiles(shootoutFiles) - private def getScalapFiles = getFiles(scalapFiles) - private def getSpecializedFiles = getFiles(specializedFiles) - private def getInstrumentedFiles = getFilesAndDirs(instrumentedFiles) - private def getPresentationFiles = getDirs(presentationFiles) - private def getAntFiles = getFiles(antFiles) - override def execute() { - val opts = getProject().getProperties() get "env.PARTEST_OPTS" - if (opts != null && opts.toString != "") - opts.toString.split(" ") foreach { propDef => - log("setting system property " + propDef) - val kv = propDef split "=" - val key = kv(0) substring 2 - val value = kv(1) - setProp(key, value) - } - - if (isPartestDebug || debug) { - setProp("partest.debug", "true") - nest.NestUI._verbose = true + if (debug || sys.props.contains("partest.debug")) { + nest.NestUI.setDebug() } srcDir foreach (x => setProp("partest.srcdir", x)) val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.") - - val scalaLibrary = { - (classpath.list map { fs => new File(fs) }) find { f => - f.getName match { - case "scala-library.jar" => true - case "library" if (f.getParentFile.getName == "classes") => true - case _ => false - } - } - } getOrElse sys.error("Provided classpath does not contain a Scala library.") - - val scalaReflect = { - (classpath.list map { fs => new File(fs) }) find { f => - f.getName match { - case "scala-reflect.jar" => true - case "reflect" if (f.getParentFile.getName == 
"classes") => true - case _ => false - } - } - } getOrElse sys.error("Provided classpath does not contain a Scala reflection library.") - - val scalaCompiler = { - (classpath.list map { fs => new File(fs) }) find { f => - f.getName match { - case "scala-compiler.jar" => true - case "compiler" if (f.getParentFile.getName == "classes") => true - case _ => false - } - } - } getOrElse sys.error("Provided classpath does not contain a Scala compiler.") - - val scalaPartest = { - (classpath.list map { fs => new File(fs) }) find { f => - f.getName match { - case "scala-partest.jar" => true - case "partest" if (f.getParentFile.getName == "classes") => true - case _ => false - } - } - } getOrElse sys.error("Provided classpath does not contain a Scala partest.") - - val scalaActors = { - (classpath.list map { fs => new File(fs) }) find { f => - f.getName match { - case "scala-actors.jar" => true - case "actors" if (f.getParentFile.getName == "classes") => true - case _ => false - } - } - } getOrElse sys.error("Provided classpath does not contain a Scala actors.") + val cpfiles = classpath.list map { fs => new File(fs) } toList + def findCp(name: String) = cpfiles find (f => + (f.getName == s"scala-$name.jar") + || (f.absolutePathSegments endsWith Seq("classes", name)) + ) getOrElse sys.error(s"Provided classpath does not contain a Scala $name element.") + + val scalaLibrary = findCp("library") + val scalaReflect = findCp("reflect") + val scalaCompiler = findCp("compiler") + val scalaPartest = findCp("partest") + val scalaActors = findCp("actors") def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a => val parts = a.getParts - if(parts eq null) Seq[String]() else parts.toSeq + if (parts eq null) Nil else parts.toSeq }) val antRunner = new scala.tools.partest.nest.AntRunner val antFileManager = antRunner.fileManager - // this is a workaround for https://issues.scala-lang.org/browse/SI-5433 - // when that bug is fixed, this paragraph of code can be safely 
removed - // we hack into the classloader that will become parent classloader for scalac - // this way we ensure that reflective macro lookup will pick correct Code.lift - val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader] - val path = new org.apache.tools.ant.types.Path(getProject()) - val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath) - path.setPath(newClassPath) - loader.setClassPath(path) - - antFileManager.showDiff = showDiff - antFileManager.showLog = showLog + // antFileManager.failed = runFailed antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*) antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath @@ -347,53 +147,35 @@ class PartestTask extends Task with CompilationPathProperty { javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath) javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath) scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _) - timeout foreach (antFileManager.timeout = _) - - type TFSet = (Array[File], String, String) - val testFileSets = List( - (getPosFiles, "pos", "Compiling files that are expected to build"), - (getNegFiles, "neg", "Compiling files that are expected to fail"), - (getRunFiles, "run", "Compiling and running files"), - (getJvmFiles, "jvm", "Compiling and running files"), - (getResidentFiles, "res", "Running resident compiler scenarii"), - (getScalacheckFiles, "scalacheck", "Running scalacheck tests"), - (getScriptFiles, "script", "Running script files"), - (getShootoutFiles, "shootout", "Running shootout tests"), - (getScalapFiles, "scalap", "Running scalap tests"), - (getSpecializedFiles, "specialized", "Running specialized files"), - (getInstrumentedFiles, "instrumented", "Running instrumented files"), - (getPresentationFiles, "presentation", "Running presentation compiler test files"), - (getAntFiles, "ant", "Running ant task tests") - ) - - 
def runSet(set: TFSet): (Int, Int, Iterable[String]) = { - val (files, name, msg) = set + + def runSet(kind: String, files: Array[File]): (Int, Int, List[String]) = { if (files.isEmpty) (0, 0, List()) else { - log(msg) - val results: Iterable[(String, TestState)] = antRunner.reflectiveRunTestsForFiles(files, name) - val (succs, fails) = resultsToStatistics(results) + log(s"Running ${files.length} tests in '$kind' at $now") + // log(s"Tests: ${files.toList}") + val results = antRunner.reflectiveRunTestsForFiles(files, kind) + val (passed, failed) = results partition (_.isOk) + val numPassed = passed.size + val numFailed = failed.size + def failedMessages = failed map (_.longStatus) - val failed: Iterable[String] = results collect { - case (path, TestState.Fail) => path + " [FAILED]" - case (path, TestState.Timeout) => path + " [TIMOUT]" - } + log(s"Completed '$kind' at $now") // create JUnit Report xml files if directory was specified jUnitReportDir foreach { d => d.mkdir - val report = testReport(name, results, succs, fails) - scala.xml.XML.save(d.getAbsolutePath+"/"+name+".xml", report) + val report = testReport(kind, results, numPassed, numFailed) + scala.xml.XML.save(d.getAbsolutePath+"/"+kind+".xml", report) } - (succs, fails, failed) + (numPassed, numFailed, failedMessages) } } - val _results = testFileSets map runSet - val allSuccesses = _results map (_._1) sum - val allFailures = _results map (_._2) sum + val _results = kinds map (k => runSet(k, TestKinds testsFor k map (_.jfile) toArray)) + val allSuccesses = _results map (_._1) sum + val allFailures = _results map (_._2) sum val allFailedPaths = _results flatMap (_._3) def f = if (errorOnFailed && allFailures > 0) (sys error _) else log(_: String) @@ -408,20 +190,17 @@ class PartestTask extends Task with CompilationPathProperty { f(msg) } - private def oneResult(res: (String, TestState)) = - <testcase name={res._1}>{ - res._2 match { - case TestState.Ok => scala.xml.NodeSeq.Empty - case TestState.Fail => 
<failure message="Test failed"/> - case TestState.Timeout => <failure message="Test timed out"/> - } + private def oneResult(res: TestState) = + <testcase name={res.testIdent}>{ + if (res.isOk) scala.xml.NodeSeq.Empty + else <failure message="Test failed"/> }</testcase> - private def testReport(kind: String, results: Iterable[(String, TestState)], succs: Int, fails: Int) = + private def testReport(kind: String, results: Iterable[TestState], succs: Int, fails: Int) = <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}> <properties/> { - results.map(oneResult(_)) + results map oneResult } </testsuite> } diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala new file mode 100644 index 0000000000..ec682690ca --- /dev/null +++ b/src/partest/scala/tools/partest/TestKinds.scala @@ -0,0 +1,67 @@ +package scala.tools +package partest + +import nest.PathSettings.srcDir + +object TestKinds { + val standardKinds = "pos neg run jvm res buildmanager scalacheck scalap specialized instrumented presentation ant" split "\\s+" toList + val standardArgs = standardKinds map ("--" + _) + + def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml") + def denotesTestDir(p: Path) = kindOf(p) match { + case "res" => false + case _ => p.isDirectory && p.extension == "" + } + def denotesTestPath(p: Path) = denotesTestDir(p) || denotesTestFile(p) + + // TODO + def isTestForPartest(p: Path) = ( + (p.name == "intentional-failure.scala") + || (p.path contains "test-for-partest") + ) + + def kindOf(p: Path) = { + p.toAbsolute.segments takeRight 2 head + + // (srcDir relativize p.toCanonical).segments match { + // case (".." :: "scaladoc" :: xs) => xs.head + // case xs => xs.head + // } + } + def logOf(p: Path) = { + p.parent / s"${p.stripExtension}-${kindOf(p)}.log" + // p.parent / s"${p.stripExtension}.log" + } + + // true if a test path matches the --grep expression. 
+ private def pathMatchesExpr(path: Path, expr: String) = { + // Matches the expression if any source file contains the expr, + // or if the checkfile contains it, or if the filename contains + // it (the last is case-insensitive.) + def matches(p: Path) = ( + (p.path.toLowerCase contains expr.toLowerCase) + || (p.fileContents contains expr) + ) + def candidates = { + (path changeExtension "check") +: { + if (path.isFile) List(path) + else path.toDirectory.deepList() filter (_.isJavaOrScala) toList + } + } + + (candidates exists matches) + } + + def groupedTests(paths: List[Path]): List[(String, List[Path])] = + (paths.distinct groupBy kindOf).toList sortBy (standardKinds indexOf _._1) + + /** Includes tests for testing partest. */ + private def allTestsForKind(kind: String): List[Path] = + (srcDir / kind toDirectory).list.toList filter denotesTestPath + + def testsForPartest: List[Path] = standardKinds flatMap allTestsForKind filter isTestForPartest + def testsFor(kind: String): List[Path] = allTestsForKind(kind) filterNot isTestForPartest + def grepFor(expr: String): List[Path] = standardTests filter (t => pathMatchesExpr(t, expr)) + def standardTests: List[Path] = standardKinds flatMap testsFor + def failedTests: List[Path] = standardTests filter (p => logOf(p).isFile) +} diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala new file mode 100644 index 0000000000..ce8e72f616 --- /dev/null +++ b/src/partest/scala/tools/partest/TestState.scala @@ -0,0 +1,54 @@ +package scala.tools.partest + +import scala.tools.nsc.FatalError +import scala.tools.nsc.util.stackTraceString + +sealed abstract class TestState { + def testFile: File + def what: String + def reason: String + def transcript: List[String] + + def isOk = false + def isSkipped = false + def testIdent = testFile.testIdent + def transcriptString = transcript.mkString("\n") + + def identAndReason = testIdent + reasonString + def status = s"$what - 
$identAndReason" + def longStatus = status + transcriptString + def reasonString = if (reason == "") "" else s" [$reason]" + + override def toString = status +} + +object TestState { + case class Uninitialized(testFile: File) extends TestState { + def what = "uninitialized" + def reason = what + def transcript = Nil + } + case class Pass(testFile: File) extends TestState { + final override def isOk = true + def what = "pass" + def transcript: List[String] = Nil + def reason = "" + } + case class Skip(testFile: File, reason: String) extends TestState { + override def isOk = true + final override def isSkipped = true + def transcript: List[String] = Nil + def what = "skip" + } + case class Fail(testFile: File, reason: String, transcript: List[String]) extends TestState { + def what = "fail" + } + case class Crash(testFile: File, caught: Throwable, transcript: List[String]) extends TestState { + def what = "crash" + def reason = s"caught $caught_s - ${caught.getMessage}" + + private def caught_s = (caught.getClass.getName split '.').last + private def stack_s = stackTraceString(caught) + override def transcriptString = nljoin(super.transcriptString, caught_s) + } +} diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala index 93045b8c1d..1d3b79171b 100644 --- a/src/partest/scala/tools/partest/nest/AntRunner.scala +++ b/src/partest/scala/tools/partest/nest/AntRunner.scala @@ -10,24 +10,21 @@ package scala.tools.partest package nest -import java.io.File -import scala.tools.nsc.io.{ Directory } - class AntRunner extends DirectRunner { val fileManager = new FileManager { - var JAVACMD: String = "java" - var JAVAC_CMD: String = "javac" - var CLASSPATH: String = _ - var LATEST_LIB: String = _ - var LATEST_REFLECT: String = _ - var LATEST_COMP: String = _ - var LATEST_PARTEST: String = _ - var LATEST_ACTORS: String = _ - val testRootPath: String = "test" - val testRootDir: Directory = Directory(testRootPath) + 
var JAVACMD: String = "java" + var JAVAC_CMD: String = "javac" + var CLASSPATH: String = _ + var LATEST_LIB: String = _ + var LATEST_REFLECT: String = _ + var LATEST_COMP: String = _ + var LATEST_PARTEST: String = _ + var LATEST_ACTORS: String = _ + val testRootPath: String = "test" + val testRootDir: Directory = Directory(testRootPath) } - def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String) = + def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): List[TestState] = runTestsForFiles(kindFiles.toList, kind) } diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala deleted file mode 100644 index a8694cc0d6..0000000000 --- a/src/partest/scala/tools/partest/nest/CompileManager.scala +++ /dev/null @@ -1,182 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Philipp Haller - */ - -// $Id$ - -package scala.tools.partest -package nest - -import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io } -import scala.reflect.io.{ Directory, File => SFile, FileOperationException } -import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter } -import scala.tools.nsc.util.{ ClassPath, FakePos } -import scala.tools.nsc.Properties.{ setProp, propOrEmpty } -import scala.tools.util.PathResolver -import io.Path -import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter } -import File.pathSeparator - -sealed abstract class CompilationOutcome { - def merge(other: CompilationOutcome): CompilationOutcome - def isPositive = this eq CompileSuccess - def isNegative = this eq CompileFailed -} -case object CompileSuccess extends CompilationOutcome { - def merge(other: CompilationOutcome) = other -} -case object CompileFailed extends CompilationOutcome { - def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other -} -case object CompilerCrashed extends CompilationOutcome { - def 
merge(other: CompilationOutcome) = this -} - -class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) { - shortname = true -} - -class TestSettings(cp: String, error: String => Unit) extends Settings(error) { - def this(cp: String) = this(cp, _ => ()) - - nowarnings.value = false - encoding.value = "UTF-8" - classpath.value = cp -} - -abstract class SimpleCompiler { - def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome -} - -class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { - def newGlobal(settings: Settings, reporter: Reporter): Global = - Global(settings, reporter) - - def newGlobal(settings: Settings, logWriter: FileWriter): Global = - newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter))) - - def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB) - def newSettings(outdir: String): TestSettings = { - val cp = ClassPath.join(fileManager.LATEST_LIB, outdir) - val s = new TestSettings(cp) - s.outdir.value = outdir - - s - } - - implicit class Copier(f: SFile) { - // But what if f is bigger than CHUNK?! - def copyTo(dest: Path) { - dest.toFile writeAll f.slurp - } - } - - // plugin path can be relative to test root, or cwd is out - private def updatePluginPath(options: String, out: Option[File], srcdir: Directory): String = { - val dir = fileManager.testRootDir - def pathOrCwd(p: String) = - if (p == "." 
&& out.isDefined) { - val plugxml = "scalac-plugin.xml" - val pout = Path(out.get) - val pd = (srcdir / plugxml).toFile - if (pd.exists) pd copyTo (pout / plugxml) - pout - } else Path(p) - def absolutize(path: String) = pathOrCwd(path) match { - case x if x.isAbsolute => x.path - case x => (dir / x).toAbsolute.path - } - - val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:") - val plugins = opt1 map (_ stripPrefix "-Xplugin:") flatMap (_ split pathSeparator) map absolutize - val pluginOption = if (opt1.isEmpty) Nil else List("-Xplugin:" + (plugins mkString pathSeparator)) - - (opt2 ::: pluginOption) mkString " " - } - - def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome = { - val testSettings = out match { - case Some(f) => newSettings(f.getAbsolutePath) - case _ => newSettings() - } - val logWriter = new FileWriter(log) - - // this api has no notion of srcdir, so fake it - val fstFile = SFile(files(0)) - val srcdir = fstFile.parent - - // check whether there is a ".flags" file - def convertFlags(f: SFile) = updatePluginPath(f.slurp(), out, srcdir) - val logFile = basename(log.getName) - val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-"))) - val argString = (SFile(log).parent / flagsFileName) ifFile (convertFlags) getOrElse "" - - // slurp local flags (e.g., "A_1.flags") - def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num) - val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List()) - val localFlagsList = if (inGroup.nonEmpty) { - val localArgString = (srcdir / (fstFile.stripExtension + ".flags")) ifFile (convertFlags) getOrElse "" - localArgString.split(' ').toList.filter(_.length > 0) - } else List() - - val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0) ::: localFlagsList - val args = allOpts.toList - - NestUI.verbose("scalac options: "+allOpts) - - val 
command = new CompilerCommand(args, testSettings) - val global = newGlobal(command.settings, logWriter) - val testRep: ExtConsoleReporter = global.reporter.asInstanceOf[ExtConsoleReporter] - - val testFileFn: (File, FileManager) => TestFile = kind match { - case "pos" => PosTestFile.apply - case "neg" => NegTestFile.apply - case "run" => RunTestFile.apply - case "jvm" => JvmTestFile.apply - case "shootout" => ShootoutTestFile.apply - case "scalap" => ScalapTestFile.apply - case "scalacheck" => ScalaCheckTestFile.apply - case "specialized" => SpecializedTestFile.apply - case "instrumented" => InstrumentedTestFile.apply - case "presentation" => PresentationTestFile.apply - case "ant" => AntTestFile.apply - } - val test: TestFile = testFileFn(files.head, fileManager) - if (!test.defineSettings(command.settings, out.isEmpty)) { - testRep.error(FakePos("partest"), test.flags match { - case Some(flags) => "bad flags: " + flags - case _ => "bad settings: " + command.settings - }) - } - - val toCompile = files map (_.getPath) - - try { - NestUI.verbose("compiling "+toCompile) - NestUI.verbose("with classpath: "+global.classPath.toString) - NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path")) - try { - if (command.shouldStopWithInfo) logWriter append (command getInfoMessage global) - else new global.Run compile toCompile - } catch { - case FatalError(msg) => - testRep.error(null, "fatal error: " + msg) - return CompilerCrashed - } - - testRep.printSummary() - testRep.writer.close() - } - finally logWriter.close() - - if (testRep.hasErrors || command.shouldStopWithInfo) CompileFailed - else CompileSuccess - } -} - -class CompileManager(val fileManager: FileManager) { - private def newCompiler = new DirectCompiler(fileManager) - def attemptCompile(outdir: Option[File], sources: List[File], kind: String, log: File): CompilationOutcome = - newCompiler.compile(outdir, sources, kind, log) -} diff --git 
a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index 0ec3f60bf5..b436675d3a 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -3,20 +3,15 @@ * @author Philipp Haller */ -// $Id$ + package scala.tools.partest package nest -import java.io.{ File, FilenameFilter, IOException, StringWriter } +import java.io.{ FilenameFilter, IOException } import java.net.URI import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd } -import scala.tools.util.PathResolver import scala.tools.nsc.{ io, util } -import util.{ ClassPath } -import io.{ Path, Directory } -import File.pathSeparator -import ClassPath.{ join } import PathResolver.{ Environment, Defaults } class ConsoleFileManager extends FileManager { @@ -55,7 +50,7 @@ class ConsoleFileManager extends FileManager { var JAVAC_CMD = PartestDefaults.javacCmd - NestUI.verbose("CLASSPATH: "+CLASSPATH) + vlog("CLASSPATH: "+CLASSPATH) if (!srcDir.isDirectory) { NestUI.failure("Source directory \"" + srcDir.path + "\" not found") @@ -70,14 +65,14 @@ class ConsoleFileManager extends FileManager { } def findLatest() { - NestUI.verbose("test parent: "+testParent) + vlog("test parent: "+testParent) - def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).toCanonical + def prefixFileWith(parent: File, relPath: String) = (SFile(parent) / relPath).toCanonical def prefixFile(relPath: String) = (testParent / relPath).toCanonical if (!testClasses.isEmpty) { testClassesDir = Path(testClasses.get).toCanonical.toDirectory - NestUI.verbose("Running with classes in "+testClassesDir) + vlog("Running with classes in "+testClassesDir) latestLibFile = testClassesDir / "library" latestActorsFile = testClassesDir / "library" / "actors" @@ -87,7 +82,7 @@ class ConsoleFileManager extends FileManager { } else if (testBuild.isDefined) { val dir = 
Path(testBuild.get) - NestUI.verbose("Running on "+dir) + vlog("Running on "+dir) latestLibFile = dir / "lib/scala-library.jar" latestActorsFile = dir / "lib/scala-actors.jar" latestReflectFile = dir / "lib/scala-reflect.jar" @@ -96,7 +91,7 @@ class ConsoleFileManager extends FileManager { } else { def setupQuick() { - NestUI.verbose("Running build/quick") + vlog("Running build/quick") latestLibFile = prefixFile("build/quick/classes/library") latestActorsFile = prefixFile("build/quick/classes/library/actors") latestReflectFile = prefixFile("build/quick/classes/reflect") @@ -105,7 +100,7 @@ class ConsoleFileManager extends FileManager { } def setupInst() { - NestUI.verbose("Running dist (installed)") + vlog("Running dist (installed)") val p = testParent.getParentFile latestLibFile = prefixFileWith(p, "lib/scala-library.jar") latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar") @@ -115,7 +110,7 @@ class ConsoleFileManager extends FileManager { } def setupDist() { - NestUI.verbose("Running dists/latest") + vlog("Running dists/latest") latestLibFile = prefixFile("dists/latest/lib/scala-library.jar") latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar") latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar") @@ -124,7 +119,7 @@ class ConsoleFileManager extends FileManager { } def setupPack() { - NestUI.verbose("Running build/pack") + vlog("Running build/pack") latestLibFile = prefixFile("build/pack/lib/scala-library.jar") latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar") latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar") @@ -170,11 +165,13 @@ class ConsoleFileManager extends FileManager { var latestReflectFile: File = _ var latestCompFile: File = _ var latestPartestFile: File = _ - def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile + //def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile + //def latestScalapFile: File = new File(latestLibFile.getParentFile, 
"scalap.jar") var testClassesDir: Directory = _ // initialize above fields findLatest() + /* def getFiles(kind: String, cond: Path => Boolean): List[File] = { def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _) @@ -187,4 +184,6 @@ class ConsoleFileManager extends FileManager { ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile) } + */ + var latestFjbgFile: File = _ } diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index fd4d52f603..ddd42f5601 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -3,83 +3,110 @@ * @author Philipp Haller */ -// $Id$ - -package scala.tools.partest +package scala.tools +package partest package nest -import java.io.{File, PrintStream, FileOutputStream, BufferedReader, - InputStreamReader, StringWriter, PrintWriter} import utils.Properties._ import scala.tools.nsc.Properties.{ versionMsg, setProp } import scala.tools.nsc.util.CommandLineParser -import scala.tools.nsc.io -import io.{ Path } import scala.collection.{ mutable, immutable } +import PathSettings.srcDir +import TestKinds._ class ConsoleRunner extends DirectRunner { - import PathSettings.{ srcDir, testRoot } - - case class TestSet(kind: String, filter: Path => Boolean, msg: String) - private def stdFilter(p: Path) = p.isDirectory || (p hasExtension "scala") - private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml") - - val testSets = { - List( - TestSet("pos", stdFilter, "Testing compiler (on files whose compilation should succeed)"), - TestSet("neg", stdFilter, "Testing compiler (on files whose compilation should fail)"), - TestSet("run", stdFilter, "Testing interpreter and backend"), - TestSet("jvm", stdFilter, "Testing JVM backend"), - TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"), - TestSet("shootout", stdFilter, 
"Testing shootout tests"), - TestSet("script", stdFilter, "Testing script tests"), - TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"), - TestSet("scalap", _.isDirectory, "Run scalap decompiler tests"), - TestSet("specialized", stdFilter, "Testing specialized tests"), - TestSet("instrumented", stdFilter, "Testing instrumented tests"), - TestSet("presentation", _.isDirectory, "Testing presentation compiler tests."), - TestSet("ant", antFilter, "Run Ant task tests.") - ) - } + import NestUI._ + import NestUI.color._ + + // So we can ctrl-C a test run and still hear all + // the buffered failure info. + scala.sys addShutdownHook issueSummaryReport() var fileManager: ConsoleFileManager = _ - private var testFiles: List[File] = List() - private val errors = PartestDefaults.errorCount - private val testSetKinds = testSets map (_.kind) - private val testSetArgs = testSets map ("--" + _.kind) - private val testSetArgMap = testSetArgs zip testSets toMap + private var totalTests = 0 + private val passedTests = mutable.ListBuffer[TestState]() + private val failedTests = mutable.ListBuffer[TestState]() + + def comment(s: String) = echo(magenta("# " + s)) + def levyJudgment() = { + if (totalTests == 0) echoMixed("No tests to run.") + else if (elapsedMillis == 0) echoMixed("Test Run ABORTED") + else if (isSuccess) echoPassed("Test Run PASSED") + else echoFailed("Test Run FAILED") + } + + def passFailString(passed: Int, failed: Int, skipped: Int): String = { + val total = passed + failed + skipped + val isSuccess = failed == 0 + def p0 = s"$passed/$total" + def p = ( if (isSuccess) bold(green(p0)) else p0 ) + " passed" + def f = if (failed == 0) "" else bold(red("" + failed)) + " failed" + def s = if (skipped == 0) "" else bold(yellow("" + skipped)) + " skipped" + + oempty(p, f, s) mkString ", " + } + + private var summarizing = false + private var elapsedMillis = 0L + private var expectedFailures = 0 + private def isSuccess = failedTests.size == expectedFailures + + 
def issueSummaryReport() { + // Don't run twice + if (!summarizing) { + summarizing = true + + val passed0 = passedTests.toList + val failed0 = failedTests.toList + val passed = passed0.size + val failed = failed0.size + val skipped = totalTests - (passed + failed) + val passFail = passFailString(passed, failed, skipped) + val elapsed = if (elapsedMillis > 0) " (elapsed time: " + elapsedString(elapsedMillis) + ")" else "" + val message = passFail + elapsed + + if (failed0.nonEmpty) { + echo(bold(cyan("##### Transcripts from failed tests #####\n"))) + failed0 foreach { state => + comment("partest " + state.testFile) + echo(state.transcriptString + "\n") + } + } - private def printVersion() { NestUI outline (versionMsg + "\n") } + echo(message) + levyJudgment() + } + } private val unaryArgs = List( - "--pack", "--all", "--verbose", "--show-diff", "--show-log", + "--pack", "--all", + "--terse", "--verbose", "--show-diff", "--show-log", "--self-test", "--failed", "--update-check", "--version", "--ansi", "--debug", "--help" - ) ::: testSetArgs + ) ::: standardArgs private val binaryArgs = List( "--grep", "--srcpath", "--buildpath", "--classpath" ) - // true if a test path matches the --grep expression. 
- private def pathMatchesExpr(path: Path, expr: String) = { - def pred(p: Path) = file2String(p.toFile) contains expr - def greppable(f: Path) = f.isFile && (f hasExtension ("scala", "java")) - def any(d: Path) = d.toDirectory.deepList() exists (f => greppable(f) && pred(f)) - - (path.isFile && pred(path)) || - (path.isDirectory && any(path)) || - (pred(path changeExtension "check")) - } - def main(argstr: String) { val parsed = CommandLineParser(argstr) withUnaryArgs unaryArgs withBinaryArgs binaryArgs - val args = onlyValidTestPaths(parsed.residualArgs) - /* Early return on no args, version, or invalid args */ - if (argstr == "") return NestUI.usage() - if (parsed isSet "--version") return printVersion - if (parsed isSet "--help") return NestUI.usage() + if (parsed isSet "--debug") NestUI.setDebug() + if (parsed isSet "--verbose") NestUI.setVerbose() + if (parsed isSet "--terse") NestUI.setTerse() + + // Early return on no args, version, or invalid args + if (parsed isSet "--version") return echo(versionMsg) + if ((argstr == "") || (parsed isSet "--help")) return NestUI.usage() + + val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath + if (invalid.nonEmpty) { + if (isPartestVerbose) + invalid foreach (p => echoWarning(s"Discarding invalid test path " + p)) + else if (!isPartestTerse) + echoWarning(s"Discarding ${invalid.size} invalid test paths") + } parsed get "--srcpath" foreach (x => setProp("partest.srcdir", x)) @@ -89,144 +116,102 @@ class ConsoleRunner extends DirectRunner { else if (parsed isSet "--pack") new ConsoleFileManager("build/pack") else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest - NestUI._verbose = parsed isSet "--verbose" - fileManager.showDiff = true - // parsed isSet "--show-diff" fileManager.updateCheck = parsed isSet "--update-check" - fileManager.showLog = parsed isSet "--show-log" fileManager.failed = parsed isSet "--failed" - if (parsed isSet "--ansi") NestUI 
initialize NestUI.MANY - if (parsed isSet "--timeout") fileManager.timeout = parsed("--timeout") - if (parsed isSet "--debug") setProp("partest.debug", "true") + val partestTests = ( + if (parsed isSet "--self-test") TestKinds.testsForPartest + else Nil + ) - def addTestFile(file: File) = { - if (!file.exists) - NestUI.failure("Test file '%s' not found, skipping.\n" format file) - else { - NestUI.verbose("adding test file " + file) - testFiles +:= file - } - } + val grepExpr = parsed get "--grep" getOrElse "" // If --grep is given we suck in every file it matches. - - val grepOption = parsed get "--grep" - val grepPaths = grepOption.toList flatMap { expr => - val subjectDirs = testSetKinds map (srcDir / _ toDirectory) - val testPaths = subjectDirs flatMap (_.list filter stdFilter) - val paths = testPaths filter (p => pathMatchesExpr(p, expr)) - + var grepMessage = "" + val greppedTests = if (grepExpr == "") Nil else { + val paths = grepFor(grepExpr) if (paths.isEmpty) - NestUI.failure("--grep string '%s' matched no tests." 
format expr) + echoWarning(s"grep string '$grepExpr' matched no tests.\n") - paths map (_.jfile) + paths.sortBy(_.toString) } - val grepMessage = grepOption map (x => "Argument '%s' matched %d test(s)".format(x, grepPaths.size)) getOrElse "" - - grepPaths foreach addTestFile - args foreach (x => addTestFile(new File(x))) - // If no file arguments were given, we assume --all - val enabledTestSets: List[TestSet] = { - val enabledArgs = testSetArgs filter parsed.isSet - - if (args.isEmpty && !(parsed isSet "--grep") && (enabledArgs.isEmpty || (parsed isSet "--all"))) testSets - else enabledArgs map testSetArgMap - } + val isRerun = parsed isSet "--failed" + val rerunTests = if (isRerun) TestKinds.failedTests else Nil + def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests + val givenKinds = standardArgs filter parsed.isSet + val kinds = ( + if (parsed isSet "--all") standardKinds + else if (givenKinds.nonEmpty) givenKinds map (_ stripPrefix "--") + else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all + else Nil + ) + val kindsTests = kinds flatMap testsFor val dir = if (fileManager.testClasses.isDefined) fileManager.testClassesDir else fileManager.testBuildFile getOrElse { fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile } - val vmBin = javaHome + File.separator + "bin" - val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo) - val vmOpts = fileManager.JAVA_OPTS - - NestUI.verbose("enabled test sets: " + (enabledTestSets map (_.kind) mkString " ")) - - List( - "Scala compiler classes in: " + dir, - "Scala version is: " + versionMsg, - "Scalac options are: " + fileManager.SCALAC_OPTS, - "Java binaries in: " + vmBin, - "Java runtime is: " + vmName, - "Java options are: " + vmOpts, - "Source directory is: " + srcDir, - "" - ) foreach (x => NestUI verbose (x + "\n")) - - NestUI.verbose("available processors: " + 
Runtime.getRuntime().availableProcessors()) - - // Dragged down here so it isn't buried under the banner. - if (grepMessage != "") - NestUI.normal(grepMessage + "\n") - - val ((successes, failures), elapsedMillis) = timed(testCheckAll(enabledTestSets)) - val total = successes + failures - - val elapsedSecs = elapsedMillis/1000 - val elapsedMins = elapsedSecs/60 - val elapsedHrs = elapsedMins/60 - val dispMins = elapsedMins - elapsedHrs * 60 - val dispSecs = elapsedSecs - elapsedMins * 60 - - val dispElapsed = { - def form(num: Long) = if (num < 10) "0"+num else ""+num - form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs) + def testContributors = { + List( + if (partestTests.isEmpty) "" else "partest self-tests", + if (rerunTests.isEmpty) "" else "previously failed tests", + if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories", + if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'", + if (individualTests.isEmpty) "" else "specified tests" + ) filterNot (_ == "") mkString ", " } - if (failures == 0) - NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n") - else - NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n") + def banner = { + val vmBin = javaHome + fileSeparator + "bin" + val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo) + val vmOpts = fileManager.JAVA_OPTS + + s"""|Scala compiler classes in: $dir + |Scala version is: $versionMsg + |Scalac options are: ${fileManager.SCALAC_OPTS mkString " "} + |Java binaries in: $vmBin + |Java runtime is: $vmName + |Java options are: $vmOpts + |Source directory is: $srcDir + |Available processors: ${Runtime.getRuntime().availableProcessors()} + |Java Classpath: ${sys.props("java.class.path")} + """.stripMargin + } - System exit ( if (failures == errors) 0 else 1 ) - } + chatty(banner) - def runTests(testSet: TestSet): (Int, Int) = { - val TestSet(kind, filter, msg) = testSet 
+ val allTests = (miscTests ++ (kinds flatMap testsFor)).distinct + val grouped = (allTests groupBy kindOf).toList sortBy (x => standardKinds indexOf x._1) - fileManager.getFiles(kind, filter) match { - case Nil => NestUI.verbose("test dir empty\n") ; (0, 0) - case files => - NestUI.verbose("test files: "+files) - NestUI.outline("\n"+msg+"\n") - resultsToStatistics(runTestsForFiles(files, kind)) + totalTests = allTests.size + expectedFailures = propOrNone("partest.errors") match { + case Some(num) => num.toInt + case _ => 0 } - } - - /** - * @return (success count, failure count) - */ - def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = { - def kindOf(f: File) = { - (srcDir relativize Path(f).toCanonical).segments match { - case (".." :: "scaladoc" :: xs) => xs.head - case xs => xs.head + val expectedFailureMessage = if (expectedFailures == 0) "" else s" (expecting $expectedFailures to fail)" + echo(s"Selected $totalTests tests drawn from $testContributors$expectedFailureMessage\n") + + val (_, millis) = timed { + for ((kind, paths) <- grouped) { + val num = paths.size + val ss = if (num == 1) "" else "s" + comment(s"starting $num test$ss in $kind") + val results = runTestsForFiles(paths map (_.jfile), kind) + val (passed, failed) = results partition (_.isOk) + + passedTests ++= passed + failedTests ++= failed + if (failed.nonEmpty) { + comment(passFailString(passed.size, failed.size, 0) + " in " + kind) + } + echo("") } } - - val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x)) - invalid foreach (x => NestUI.failure( - "Invalid test file '%s', skipping.\n".format(x) + - "(Test kind '%s' not in known set '%s')".format(kindOf(x), testSetKinds)) - ) - - val grouped = (valid groupBy kindOf).toList sortBy (x => testSetKinds indexOf x._1) - val runTestsFileLists = - for ((kind, files) <- grouped) yield { - NestUI.outline("\nTesting individual files\n") - resultsToStatistics(runTestsForFiles(files, kind)) - } - - if 
(enabledSets.nonEmpty) - NestUI.verbose("Run sets: "+enabledSets) - - val results = runTestsFileLists ::: (enabledSets map runTests) - - (results map (_._1) sum, results map (_._2) sum) + this.elapsedMillis = millis + issueSummaryReport() + System exit ( if (isSuccess) 0 else 1 ) } } diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java deleted file mode 100644 index f69fc6858b..0000000000 --- a/src/partest/scala/tools/partest/nest/Diff.java +++ /dev/null @@ -1,873 +0,0 @@ - -package scala.tools.partest.nest; - -import java.util.Hashtable; - -/** A class to compare IndexedSeqs of objects. The result of comparison - is a list of <code>change</code> objects which form an - edit script. The objects compared are traditionally lines - of text from two files. Comparison options such as "ignore - whitespace" are implemented by modifying the <code>equals</code> - and <code>hashcode</code> methods for the objects compared. -<p> - The basic algorithm is described in: </br> - "An O(ND) Difference Algorithm and its Variations", Eugene Myers, - Algorithmica Vol. 1 No. 2, 1986, p 251. -<p> - This class outputs different results from GNU diff 1.15 on some - inputs. Our results are actually better (smaller change list, smaller - total size of changes), but it would be nice to know why. Perhaps - there is a memory overwrite bug in GNU diff 1.15. - - @author Stuart D. Gathman, translated from GNU diff 1.15 - Copyright (C) 2000 Business Management Systems, Inc. -<p> - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 1, or (at your option) - any later version. -<p> - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - GNU General Public License for more details. -<p> - You should have received a copy of the <a href=COPYING.txt> - GNU General Public License</a> - along with this program; if not, write to the Free Software - Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - - */ - -public class Diff { - - /** Prepare to find differences between two arrays. Each element of - the arrays is translated to an "equivalence number" based on - the result of <code>equals</code>. The original Object arrays - are no longer needed for computing the differences. They will - be needed again later to print the results of the comparison as - an edit script, if desired. - */ - public Diff(Object[] a,Object[] b) { - Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length); - filevec[0] = new file_data(a,h); - filevec[1] = new file_data(b,h); - } - - /** 1 more than the maximum equivalence value used for this or its - sibling file. */ - private int equiv_max = 1; - - /** When set to true, the comparison uses a heuristic to speed it up. - With this heuristic, for files with a constant small density - of changes, the algorithm is linear in the file size. */ - public boolean heuristic = false; - - /** When set to true, the algorithm returns a guarranteed minimal - set of changes. This makes things slower, sometimes much slower. */ - public boolean no_discards = false; - - private int[] xvec, yvec; /* IndexedSeqs being compared. */ - private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing - the X coordinate of the point furthest - along the given diagonal in the forward - search of the edit matrix. */ - private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing - the X coordinate of the point furthest - along the given diagonal in the backward - search of the edit matrix. 
*/ - private int fdiagoff, bdiagoff; - private final file_data[] filevec = new file_data[2]; - private int cost; - - /** Find the midpoint of the shortest edit script for a specified - portion of the two files. - - We scan from the beginnings of the files, and simultaneously from the ends, - doing a breadth-first search through the space of edit-sequence. - When the two searches meet, we have found the midpoint of the shortest - edit sequence. - - The value returned is the number of the diagonal on which the midpoint lies. - The diagonal number equals the number of inserted lines minus the number - of deleted lines (counting only lines before the midpoint). - The edit cost is stored into COST; this is the total number of - lines inserted or deleted (counting only lines before the midpoint). - - This function assumes that the first lines of the specified portions - of the two files do not match, and likewise that the last lines do not - match. The caller must trim matching lines from the beginning and end - of the portions it is going to specify. - - Note that if we return the "wrong" diagonal value, or if - the value of bdiag at that diagonal is "wrong", - the worst this can do is cause suboptimal diff output. - It cannot cause incorrect diff output. */ - - private int diag (int xoff, int xlim, int yoff, int ylim) { - final int[] fd = fdiag; // Give the compiler a chance. - final int[] bd = bdiag; // Additional help for the compiler. - final int[] xv = xvec; // Still more help for the compiler. - final int[] yv = yvec; // And more and more . . . - final int dmin = xoff - ylim; // Minimum valid diagonal. - final int dmax = xlim - yoff; // Maximum valid diagonal. - final int fmid = xoff - yoff; // Center diagonal of top-down search. - final int bmid = xlim - ylim; // Center diagonal of bottom-up search. - int fmin = fmid, fmax = fmid; // Limits of top-down search. - int bmin = bmid, bmax = bmid; // Limits of bottom-up search. 
- /* True if southeast corner is on an odd - diagonal with respect to the northwest. */ - final boolean odd = (fmid - bmid & 1) != 0; - - fd[fdiagoff + fmid] = xoff; - bd[bdiagoff + bmid] = xlim; - - for (int c = 1;; ++c) - { - int d; /* Active diagonal. */ - boolean big_snake = false; - - /* Extend the top-down search by an edit step in each diagonal. */ - if (fmin > dmin) - fd[fdiagoff + --fmin - 1] = -1; - else - ++fmin; - if (fmax < dmax) - fd[fdiagoff + ++fmax + 1] = -1; - else - --fmax; - for (d = fmax; d >= fmin; d -= 2) - { - int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1]; - - if (tlo >= thi) - x = tlo + 1; - else - x = thi; - oldx = x; - y = x - d; - while (x < xlim && y < ylim && xv[x] == yv[y]) { - ++x; ++y; - } - if (x - oldx > 20) - big_snake = true; - fd[fdiagoff + d] = x; - if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d]) - { - cost = 2 * c - 1; - return d; - } - } - - /* Similar extend the bottom-up search. */ - if (bmin > dmin) - bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE; - else - ++bmin; - if (bmax < dmax) - bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE; - else - --bmax; - for (d = bmax; d >= bmin; d -= 2) - { - int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1]; - - if (tlo < thi) - x = tlo; - else - x = thi - 1; - oldx = x; - y = x - d; - while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) { - --x; --y; - } - if (oldx - x > 20) - big_snake = true; - bd[bdiagoff + d] = x; - if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d]) - { - cost = 2 * c; - return d; - } - } - - /* Heuristic: check occasionally for a diagonal that has made - lots of progress compared with the edit distance. - If we have any such, find the one that has made the most - progress and return it as if it had succeeded. - - With this heuristic, for files with a constant small density - of changes, the algorithm is linear in the file size. 
*/ - - if (c > 200 && big_snake && heuristic) - { - int best = 0; - int bestpos = -1; - - for (d = fmax; d >= fmin; d -= 2) - { - int dd = d - fmid; - if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd))) - { - if (fd[fdiagoff + d] * 2 - dd > best - && fd[fdiagoff + d] - xoff > 20 - && fd[fdiagoff + d] - d - yoff > 20) - { - int k; - int x = fd[fdiagoff + d]; - - /* We have a good enough best diagonal; - now insist that it end with a significant snake. */ - for (k = 1; k <= 20; k++) - if (xvec[x - k] != yvec[x - d - k]) - break; - - if (k == 21) - { - best = fd[fdiagoff + d] * 2 - dd; - bestpos = d; - } - } - } - } - if (best > 0) - { - cost = 2 * c - 1; - return bestpos; - } - - best = 0; - for (d = bmax; d >= bmin; d -= 2) - { - int dd = d - bmid; - if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd))) - { - if ((xlim - bd[bdiagoff + d]) * 2 + dd > best - && xlim - bd[bdiagoff + d] > 20 - && ylim - (bd[bdiagoff + d] - d) > 20) - { - /* We have a good enough best diagonal; - now insist that it end with a significant snake. */ - int k; - int x = bd[bdiagoff + d]; - - for (k = 0; k < 20; k++) - if (xvec[x + k] != yvec[x - d + k]) - break; - if (k == 20) - { - best = (xlim - bd[bdiagoff + d]) * 2 + dd; - bestpos = d; - } - } - } - } - if (best > 0) - { - cost = 2 * c - 1; - return bestpos; - } - } - } - } - - /** Compare in detail contiguous subsequences of the two files - which are known, as a whole, to match each other. - - The results are recorded in the IndexedSeqs filevec[N].changed_flag, by - storing a 1 in the element for each line that is an insertion or deletion. - - The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1. - - Note that XLIM, YLIM are exclusive bounds. - All line numbers are origin-0 and discarded lines are not counted. */ - - private void compareseq (int xoff, int xlim, int yoff, int ylim) { - /* Slide down the bottom initial diagonal. 
*/ - while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) { - ++xoff; ++yoff; - } - /* Slide up the top initial diagonal. */ - while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) { - --xlim; --ylim; - } - - /* Handle simple cases. */ - if (xoff == xlim) - while (yoff < ylim) - filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true; - else if (yoff == ylim) - while (xoff < xlim) - filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true; - else - { - /* Find a point of correspondence in the middle of the files. */ - - int d = diag (xoff, xlim, yoff, ylim); - int c = cost; - int f = fdiag[fdiagoff + d]; - int b = bdiag[bdiagoff + d]; - - if (c == 1) - { - /* This should be impossible, because it implies that - one of the two subsequences is empty, - and that case was handled above without calling `diag'. - Let's verify that this is true. */ - throw new IllegalArgumentException("Empty subsequence"); - } - else - { - /* Use that point to split this problem into two subproblems. */ - compareseq (xoff, b, yoff, b - d); - /* This used to use f instead of b, - but that is incorrect! - It is not necessarily the case that diagonal d - has a snake from b to f. */ - compareseq (b, xlim, b - d, ylim); - } - } - } - - /** Discard lines from one file that have no matches in the other file. - */ - - private void discard_confusing_lines() { - filevec[0].discard_confusing_lines(filevec[1]); - filevec[1].discard_confusing_lines(filevec[0]); - } - - private boolean inhibit = false; - - /** Adjust inserts/deletes of blank lines to join changes - as much as possible. - */ - - private void shift_boundaries() { - if (inhibit) - return; - filevec[0].shift_boundaries(filevec[1]); - filevec[1].shift_boundaries(filevec[0]); - } - - public interface ScriptBuilder { - /** Scan the tables of which lines are inserted and deleted, - producing an edit script. 
- @param changed0 true for lines in first file which do not match 2nd - @param len0 number of lines in first file - @param changed1 true for lines in 2nd file which do not match 1st - @param len1 number of lines in 2nd file - @return a linked list of changes - or null - */ - public change build_script( - boolean[] changed0,int len0, - boolean[] changed1,int len1 - ); - } - - /** Scan the tables of which lines are inserted and deleted, - producing an edit script in reverse order. */ - - static class ReverseScript implements ScriptBuilder { - public change build_script( - final boolean[] changed0,int len0, - final boolean[] changed1,int len1) - { - change script = null; - int i0 = 0, i1 = 0; - while (i0 < len0 || i1 < len1) { - if (changed0[1+i0] || changed1[1+i1]) { - int line0 = i0, line1 = i1; - - /* Find # lines changed here in each file. */ - while (changed0[1+i0]) ++i0; - while (changed1[1+i1]) ++i1; - - /* Record this change. */ - script = new change(line0, line1, i0 - line0, i1 - line1, script); - } - - /* We have reached lines in the two files that match each other. */ - i0++; i1++; - } - - return script; - } - } - - static class ForwardScript implements ScriptBuilder { - /** Scan the tables of which lines are inserted and deleted, - producing an edit script in forward order. */ - public change build_script( - final boolean[] changed0,int len0, - final boolean[] changed1,int len1) - { - change script = null; - int i0 = len0, i1 = len1; - - while (i0 >= 0 || i1 >= 0) - { - if (changed0[i0] || changed1[i1]) - { - int line0 = i0, line1 = i1; - - /* Find # lines changed here in each file. */ - while (changed0[i0]) --i0; - while (changed1[i1]) --i1; - - /* Record this change. */ - script = new change(i0, i1, line0 - i0, line1 - i1, script); - } - - /* We have reached lines in the two files that match each other. */ - i0--; i1--; - } - - return script; - } - } - - /** Standard ScriptBuilders. 
*/ - public final static ScriptBuilder - forwardScript = new ForwardScript(), - reverseScript = new ReverseScript(); - - /* Report the differences of two files. DEPTH is the current directory - depth. */ - public final change diff_2(final boolean reverse) { - return diff(reverse ? reverseScript : forwardScript); - } - - /** Get the results of comparison as an edit script. The script - is described by a list of changes. The standard ScriptBuilder - implementations provide for forward and reverse edit scripts. - Alternate implementations could, for instance, list common elements - instead of differences. - @param bld an object to build the script from change flags - @return the head of a list of changes - */ - public change diff(final ScriptBuilder bld) { - - /* Some lines are obviously insertions or deletions - because they don't match anything. Detect them now, - and avoid even thinking about them in the main comparison algorithm. */ - - discard_confusing_lines (); - - /* Now do the main comparison algorithm, considering just the - undiscarded lines. */ - - xvec = filevec[0].undiscarded; - yvec = filevec[1].undiscarded; - - int diags = - filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3; - fdiag = new int[diags]; - fdiagoff = filevec[1].nondiscarded_lines + 1; - bdiag = new int[diags]; - bdiagoff = filevec[1].nondiscarded_lines + 1; - - compareseq (0, filevec[0].nondiscarded_lines, - 0, filevec[1].nondiscarded_lines); - fdiag = null; - bdiag = null; - - /* Modify the results slightly to make them prettier - in cases where that can validly be done. */ - - shift_boundaries (); - - /* Get the results of comparison in the form of a chain - of `struct change's -- an edit script. */ - return bld.build_script( - filevec[0].changed_flag, - filevec[0].buffered_lines, - filevec[1].changed_flag, - filevec[1].buffered_lines - ); - - } - - /** The result of comparison is an "edit script": a chain of change objects. 
- Each change represents one place where some lines are deleted - and some are inserted. - - LINE0 and LINE1 are the first affected lines in the two files (origin 0). - DELETED is the number of lines deleted here from file 0. - INSERTED is the number of lines inserted here in file 1. - - If DELETED is 0 then LINE0 is the number of the line before - which the insertion was done; vice versa for INSERTED and LINE1. */ - - public static class change { - /** Previous or next edit command. */ - public change link; - /** # lines of file 1 changed here. */ - public final int inserted; - /** # lines of file 0 changed here. */ - public final int deleted; - /** Line number of 1st deleted line. */ - public final int line0; - /** Line number of 1st inserted line. */ - public final int line1; - - /** Cons an additional entry onto the front of an edit script OLD. - LINE0 and LINE1 are the first affected lines in the two files (origin 0). - DELETED is the number of lines deleted here from file 0. - INSERTED is the number of lines inserted here in file 1. - - If DELETED is 0 then LINE0 is the number of the line before - which the insertion was done; vice versa for INSERTED and LINE1. */ - public change(int line0, int line1, int deleted, int inserted, change old) { - this.line0 = line0; - this.line1 = line1; - this.inserted = inserted; - this.deleted = deleted; - this.link = old; - //System.err.println(line0+","+line1+","+inserted+","+deleted); - } - } - - /** Data on one input file being compared. - */ - - class file_data { - - /** Allocate changed array for the results of comparison. */ - void clear() { - /* Allocate a flag for each line of each file, saying whether that line - is an insertion or deletion. - Allocate an extra element, always zero, at each end of each IndexedSeq. - */ - changed_flag = new boolean[buffered_lines + 2]; - } - - /** Return equiv_count[I] as the number of lines in this file - that fall in equivalence class I. 
- @return the array of equivalence class counts. - */ - int[] equivCount() { - int[] equiv_count = new int[equiv_max]; - for (int i = 0; i < buffered_lines; ++i) - ++equiv_count[equivs[i]]; - return equiv_count; - } - - /** Discard lines that have no matches in another file. - - A line which is discarded will not be considered by the actual - comparison algorithm; it will be as if that line were not in the file. - The file's `realindexes' table maps virtual line numbers - (which don't count the discarded lines) into real line numbers; - this is how the actual comparison algorithm produces results - that are comprehensible when the discarded lines are counted. -<p> - When we discard a line, we also mark it as a deletion or insertion - so that it will be printed in the output. - @param f the other file - */ - void discard_confusing_lines(file_data f) { - clear(); - /* Set up table of which lines are going to be discarded. */ - final byte[] discarded = discardable(f.equivCount()); - - /* Don't really discard the provisional lines except when they occur - in a run of discardables, with nonprovisionals at the beginning - and end. */ - filterDiscards(discarded); - - /* Actually discard the lines. */ - discard(discarded); - } - - /** Mark to be discarded each line that matches no line of another file. - If a line matches many lines, mark it as provisionally discardable. - @see equivCount() - @param counts The count of each equivalence number for the other file. - @return 0=nondiscardable, 1=discardable or 2=provisionally discardable - for each line - */ - - private byte[] discardable(final int[] counts) { - final int end = buffered_lines; - final byte[] discards = new byte[end]; - final int[] equivs = this.equivs; - int many = 5; - int tem = end / 64; - - /* Multiply MANY by approximate square root of number of lines. - That is the threshold for provisionally discardable lines. 
*/ - while ((tem = tem >> 2) > 0) - many *= 2; - - for (int i = 0; i < end; i++) - { - int nmatch; - if (equivs[i] == 0) - continue; - nmatch = counts[equivs[i]]; - if (nmatch == 0) - discards[i] = 1; - else if (nmatch > many) - discards[i] = 2; - } - return discards; - } - - /** Don't really discard the provisional lines except when they occur - in a run of discardables, with nonprovisionals at the beginning - and end. */ - - private void filterDiscards(final byte[] discards) { - final int end = buffered_lines; - - for (int i = 0; i < end; i++) - { - /* Cancel provisional discards not in middle of run of discards. */ - if (discards[i] == 2) - discards[i] = 0; - else if (discards[i] != 0) - { - /* We have found a nonprovisional discard. */ - int j; - int length; - int provisional = 0; - - /* Find end of this run of discardable lines. - Count how many are provisionally discardable. */ - for (j = i; j < end; j++) - { - if (discards[j] == 0) - break; - if (discards[j] == 2) - ++provisional; - } - - /* Cancel provisional discards at end, and shrink the run. */ - while (j > i && discards[j - 1] == 2) { - discards[--j] = 0; --provisional; - } - - /* Now we have the length of a run of discardable lines - whose first and last are not provisional. */ - length = j - i; - - /* If 1/4 of the lines in the run are provisional, - cancel discarding of all provisional lines in the run. */ - if (provisional * 4 > length) - { - while (j > i) - if (discards[--j] == 2) - discards[j] = 0; - } - else - { - int consec; - int minimum = 1; - int tem = length / 4; - - /* MINIMUM is approximate square root of LENGTH/4. - A subrun of two or more provisionals can stand - when LENGTH is at least 16. - A subrun of 4 or more can stand when LENGTH >= 64. */ - while ((tem = tem >> 2) > 0) - minimum *= 2; - minimum++; - - /* Cancel any subrun of MINIMUM or more provisionals - within the larger run. 
*/ - for (j = 0, consec = 0; j < length; j++) - if (discards[i + j] != 2) - consec = 0; - else if (minimum == ++consec) - /* Back up to start of subrun, to cancel it all. */ - j -= consec; - else if (minimum < consec) - discards[i + j] = 0; - - /* Scan from beginning of run - until we find 3 or more nonprovisionals in a row - or until the first nonprovisional at least 8 lines in. - Until that point, cancel any provisionals. */ - for (j = 0, consec = 0; j < length; j++) - { - if (j >= 8 && discards[i + j] == 1) - break; - if (discards[i + j] == 2) { - consec = 0; discards[i + j] = 0; - } - else if (discards[i + j] == 0) - consec = 0; - else - consec++; - if (consec == 3) - break; - } - - /* I advances to the last line of the run. */ - i += length - 1; - - /* Same thing, from end. */ - for (j = 0, consec = 0; j < length; j++) - { - if (j >= 8 && discards[i - j] == 1) - break; - if (discards[i - j] == 2) { - consec = 0; discards[i - j] = 0; - } - else if (discards[i - j] == 0) - consec = 0; - else - consec++; - if (consec == 3) - break; - } - } - } - } - } - - /** Actually discard the lines. - @param discards flags lines to be discarded - */ - private void discard(final byte[] discards) { - final int end = buffered_lines; - int j = 0; - for (int i = 0; i < end; ++i) - if (no_discards || discards[i] == 0) - { - undiscarded[j] = equivs[i]; - realindexes[j++] = i; - } - else - changed_flag[1+i] = true; - nondiscarded_lines = j; - } - - file_data(Object[] data, Hashtable<Object, Integer> h) { - buffered_lines = data.length; - - equivs = new int[buffered_lines]; - undiscarded = new int[buffered_lines]; - realindexes = new int[buffered_lines]; - - for (int i = 0; i < data.length; ++i) { - Integer ir = h.get(data[i]); - if (ir == null) - h.put(data[i], new Integer(equivs[i] = equiv_max++)); - else - equivs[i] = ir.intValue(); - } - } - - /** Adjust inserts/deletes of blank lines to join changes - as much as possible. 
- - We do something when a run of changed lines include a blank - line at one end and have an excluded blank line at the other. - We are free to choose which blank line is included. - `compareseq' always chooses the one at the beginning, - but usually it is cleaner to consider the following blank line - to be the "change". The only exception is if the preceding blank line - would join this change to other changes. - @param f the file being compared against - */ - - void shift_boundaries(file_data f) { - final boolean[] changed = changed_flag; - final boolean[] other_changed = f.changed_flag; - int i = 0; - int j = 0; - int i_end = buffered_lines; - int preceding = -1; - int other_preceding = -1; - - for (;;) - { - int start, end, other_start; - - /* Scan forwards to find beginning of another run of changes. - Also keep track of the corresponding point in the other file. */ - - while (i < i_end && !changed[1+i]) - { - while (other_changed[1+j++]) - /* Non-corresponding lines in the other file - will count as the preceding batch of changes. */ - other_preceding = j; - i++; - } - - if (i == i_end) - break; - - start = i; - other_start = j; - - for (;;) - { - /* Now find the end of this run of changes. */ - - while (i < i_end && changed[1+i]) i++; - end = i; - - /* If the first changed line matches the following unchanged one, - and this run does not follow right after a previous run, - and there are no lines deleted from the other file here, - then classify the first changed line as unchanged - and the following line as changed in its place. */ - - /* You might ask, how could this run follow right after another? - Only because the previous run was shifted here. 
*/ - - if (end != i_end - && equivs[start] == equivs[end] - && !other_changed[1+j] - && end != i_end - && !((preceding >= 0 && start == preceding) - || (other_preceding >= 0 - && other_start == other_preceding))) - { - changed[1+end++] = true; - changed[1+start++] = false; - ++i; - /* Since one line-that-matches is now before this run - instead of after, we must advance in the other file - to keep in synch. */ - ++j; - } - else - break; - } - - preceding = i; - other_preceding = j; - } - } - - /** Number of elements (lines) in this file. */ - final int buffered_lines; - - /** IndexedSeq, indexed by line number, containing an equivalence code for - each line. It is this IndexedSeq that is actually compared with that - of another file to generate differences. */ - private final int[] equivs; - - /** IndexedSeq, like the previous one except that - the elements for discarded lines have been squeezed out. */ - final int[] undiscarded; - - /** IndexedSeq mapping virtual line numbers (not counting discarded lines) - to real ones (counting those lines). Both are origin-0. */ - final int[] realindexes; - - /** Total number of nondiscarded lines. */ - int nondiscarded_lines; - - /** Array, indexed by real origin-1 line number, - containing true for a line that is an insertion or a deletion. - The results of comparison are stored here. */ - boolean[] changed_flag; - - } -} diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java deleted file mode 100644 index 31f9a1bc79..0000000000 --- a/src/partest/scala/tools/partest/nest/DiffPrint.java +++ /dev/null @@ -1,606 +0,0 @@ - -package scala.tools.partest.nest; - -import java.io.*; -import java.util.Vector; -import java.util.Date; -//import com.objectspace.jgl.predicates.UnaryPredicate; - -interface UnaryPredicate { - boolean execute(Object obj); -} - -/** A simple framework for printing change lists produced by <code>Diff</code>. 
- @see bmsi.util.Diff - @author Stuart D. Gathman - Copyright (C) 2000 Business Management Systems, Inc. -<p> - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 1, or (at your option) - any later version. -<p> - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. -<p> - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - */ -public class DiffPrint { - /** A Base class for printing edit scripts produced by Diff. - This class divides the change list into "hunks", and calls - <code>print_hunk</code> for each hunk. Various utility methods - are provided as well. - */ - public static abstract class Base { - protected Base(Object[] a,Object[] b, Writer w) { - outfile = new PrintWriter(w); - file0 = a; - file1 = b; - } - /** Set to ignore certain kinds of lines when printing - an edit script. For example, ignoring blank lines or comments. - */ - protected UnaryPredicate ignore = null; - - /** Set to the lines of the files being compared. - */ - protected Object[] file0, file1; - - /** Divide SCRIPT into pieces by calling HUNKFUN and - print each piece with PRINTFUN. - Both functions take one arg, an edit script. - - PRINTFUN takes a subscript which belongs together (with a null - link at the end) and prints it. */ - public void print_script(Diff.change script) { - Diff.change next = script; - - while (next != null) - { - Diff.change t, end; - - /* Find a set of changes that belong together. 
*/ - t = next; - end = hunkfun(next); - - /* Disconnect them from the rest of the changes, - making them a hunk, and remember the rest for next iteration. */ - next = end.link; - end.link = null; - //if (DEBUG) - // debug_script(t); - - /* Print this hunk. */ - print_hunk(t); - - /* Reconnect the script so it will all be freed properly. */ - end.link = next; - } - outfile.flush(); - } - - /** Called with the tail of the script - and returns the last link that belongs together with the start - of the tail. */ - - protected Diff.change hunkfun(Diff.change hunk) { - return hunk; - } - - protected int first0, last0, first1, last1, deletes, inserts; - protected PrintWriter outfile; - - /** Look at a hunk of edit script and report the range of lines in each file - that it applies to. HUNK is the start of the hunk, which is a chain - of `struct change'. The first and last line numbers of file 0 are stored - in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1. - Note that these are internal line numbers that count from 0. - - If no lines from file 0 are deleted, then FIRST0 is LAST0+1. - - Also set *DELETES nonzero if any lines of file 0 are deleted - and set *INSERTS nonzero if any lines of file 1 are inserted. - If only ignorable lines are inserted or deleted, both are - set to 0. */ - - protected void analyze_hunk(Diff.change hunk) { - int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0; - int i; - Diff.change next; - boolean nontrivial = (ignore == null); - - show_from = show_to = 0; - - f0 = hunk.line0; - f1 = hunk.line1; - - for (next = hunk; next != null; next = next.link) - { - l0 = next.line0 + next.deleted - 1; - l1 = next.line1 + next.inserted - 1; - show_from += next.deleted; - show_to += next.inserted; - for (i = next.line0; i <= l0 && ! nontrivial; i++) - if (!ignore.execute(file0[i])) - nontrivial = true; - for (i = next.line1; i <= l1 && ! 
nontrivial; i++) - if (!ignore.execute(file1[i])) - nontrivial = true; - } - - first0 = f0; - last0 = l0; - first1 = f1; - last1 = l1; - - /* If all inserted or deleted lines are ignorable, - tell the caller to ignore this hunk. */ - - if (!nontrivial) - show_from = show_to = 0; - - deletes = show_from; - inserts = show_to; - } - - /** Print the script header which identifies the files compared. */ - protected void print_header(String filea, String fileb) { } - - protected abstract void print_hunk(Diff.change hunk); - - protected void print_1_line(String pre,Object linbuf) { - outfile.println(pre + linbuf.toString()); - } - - /** Print a pair of line numbers with SEPCHAR, translated for file FILE. - If the two numbers are identical, print just one number. - - Args A and B are internal line numbers. - We print the translated (real) line numbers. */ - - protected void print_number_range (char sepchar, int a, int b) { - /* Note: we can have B < A in the case of a range of no lines. - In this case, we should print the line number before the range, - which is B. */ - if (++b > ++a) - outfile.print("" + a + sepchar + b); - else - outfile.print(b); - } - - public static char change_letter(int inserts, int deletes) { - if (inserts == 0) - return 'd'; - else if (deletes == 0) - return 'a'; - else - return 'c'; - } - } - - /** Print a change list in the standard diff format. - */ - public static class NormalPrint extends Base { - - public NormalPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - /** Print a hunk of a normal diff. - This is a contiguous portion of a complete edit script, - describing changes in consecutive lines. */ - - protected void print_hunk (Diff.change hunk) { - - /* Determine range of line numbers involved in each file. 
*/ - analyze_hunk(hunk); - if (deletes == 0 && inserts == 0) - return; - - /* Print out the line number header for this hunk */ - print_number_range (',', first0, last0); - outfile.print(change_letter(inserts, deletes)); - print_number_range (',', first1, last1); - outfile.println(); - - /* Print the lines that the first file has. */ - if (deletes != 0) - for (int i = first0; i <= last0; i++) - print_1_line ("< ", file0[i]); - - if (inserts != 0 && deletes != 0) - outfile.println("---"); - - /* Print the lines that the second file has. */ - if (inserts != 0) - for (int i = first1; i <= last1; i++) - print_1_line ("> ", file1[i]); - } - } - - /** Prints an edit script in a format suitable for input to <code>ed</code>. - The edit script must be generated with the reverse option to - be useful as actual <code>ed</code> input. - */ - public static class EdPrint extends Base { - - public EdPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - /** Print a hunk of an ed diff */ - protected void print_hunk(Diff.change hunk) { - - /* Determine range of line numbers involved in each file. */ - analyze_hunk (hunk); - if (deletes == 0 && inserts == 0) - return; - - /* Print out the line number header for this hunk */ - print_number_range (',', first0, last0); - outfile.println(change_letter(inserts, deletes)); - - /* Print new/changed lines from second file, if needed */ - if (inserts != 0) - { - boolean inserting = true; - for (int i = first1; i <= last1; i++) - { - /* Resume the insert, if we stopped. */ - if (! inserting) - outfile.println(i - first1 + first0 + "a"); - inserting = true; - - /* If the file's line is just a dot, it would confuse `ed'. - So output it with a double dot, and set the flag LEADING_DOT - so that we will output another ed-command later - to change the double dot into a single dot. */ - - if (".".equals(file1[i])) - { - outfile.println(".."); - outfile.println("."); - /* Now change that double dot to the desired single dot. 
*/ - outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././"); - inserting = false; - } - else - /* Line is not `.', so output it unmodified. */ - print_1_line ("", file1[i]); - } - - /* End insert mode, if we are still in it. */ - if (inserting) - outfile.println("."); - } - } - } - - /** Prints an edit script in context diff format. This and its - 'unified' variation is used for source code patches. - */ - public static class ContextPrint extends Base { - - protected int context = 3; - - public ContextPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - protected void print_context_label (String mark, File inf, String label) { - if (label != null) - outfile.println(mark + ' ' + label); - else if (inf.lastModified() > 0) - // FIXME: use DateFormat to get precise format needed. - outfile.println( - mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified()) - ); - else - /* Don't pretend that standard input is ancient. */ - outfile.println(mark + ' ' + inf.getPath()); - } - - public void print_header(String filea,String fileb) { - print_context_label ("***", new File(filea), filea); - print_context_label ("---", new File(fileb), fileb); - } - - /** If function_regexp defined, search for start of function. */ - private String find_function(Object[] lines, int start) { - return null; - } - - protected void print_function(Object[] file,int start) { - String function = find_function (file0, first0); - if (function != null) { - outfile.print(" "); - outfile.print( - (function.length() < 40) ? function : function.substring(0,40) - ); - } - } - - protected void print_hunk(Diff.change hunk) { - - /* Determine range of line numbers involved in each file. */ - - analyze_hunk (hunk); - - if (deletes == 0 && inserts == 0) - return; - - /* Include a context's width before and after. 
*/ - - first0 = Math.max(first0 - context, 0); - first1 = Math.max(first1 - context, 0); - last0 = Math.min(last0 + context, file0.length - 1); - last1 = Math.min(last1 + context, file1.length - 1); - - - outfile.print("***************"); - - /* If we looked for and found a function this is part of, - include its name in the header of the diff section. */ - print_function (file0, first0); - - outfile.println(); - outfile.print("*** "); - print_number_range (',', first0, last0); - outfile.println(" ****"); - - if (deletes != 0) { - Diff.change next = hunk; - - for (int i = first0; i <= last0; i++) { - /* Skip past changes that apply (in file 0) - only to lines before line I. */ - - while (next != null && next.line0 + next.deleted <= i) - next = next.link; - - /* Compute the marking for line I. */ - - String prefix = " "; - if (next != null && next.line0 <= i) - /* The change NEXT covers this line. - If lines were inserted here in file 1, this is "changed". - Otherwise it is "deleted". */ - prefix = (next.inserted > 0) ? "!" : "-"; - - print_1_line (prefix, file0[i]); - } - } - - outfile.print("--- "); - print_number_range (',', first1, last1); - outfile.println(" ----"); - - if (inserts != 0) { - Diff.change next = hunk; - - for (int i = first1; i <= last1; i++) { - /* Skip past changes that apply (in file 1) - only to lines before line I. */ - - while (next != null && next.line1 + next.inserted <= i) - next = next.link; - - /* Compute the marking for line I. */ - - String prefix = " "; - if (next != null && next.line1 <= i) - /* The change NEXT covers this line. - If lines were deleted here in file 0, this is "changed". - Otherwise it is "inserted". */ - prefix = (next.deleted > 0) ? "!" : "+"; - - print_1_line (prefix, file1[i]); - } - } - } - } - - /** Prints an edit script in context diff format. This and its - 'unified' variation is used for source code patches. 
- */ - public static class UnifiedPrint extends ContextPrint { - - public UnifiedPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - public void print_header(String filea,String fileb) { - print_context_label ("---", new File(filea), filea); - print_context_label ("+++", new File(fileb), fileb); - } - - private void print_number_range (int a, int b) { - //translate_range (file, a, b, &trans_a, &trans_b); - - /* Note: we can have B < A in the case of a range of no lines. - In this case, we should print the line number before the range, - which is B. */ - if (b < a) - outfile.print(b + ",0"); - else - super.print_number_range(',',a,b); - } - - protected void print_hunk(Diff.change hunk) { - /* Determine range of line numbers involved in each file. */ - analyze_hunk (hunk); - - if (deletes == 0 && inserts == 0) - return; - - /* Include a context's width before and after. */ - - first0 = Math.max(first0 - context, 0); - first1 = Math.max(first1 - context, 0); - last0 = Math.min(last0 + context, file0.length - 1); - last1 = Math.min(last1 + context, file1.length - 1); - - - - outfile.print("@@ -"); - print_number_range (first0, last0); - outfile.print(" +"); - print_number_range (first1, last1); - outfile.print(" @@"); - - /* If we looked for and found a function this is part of, - include its name in the header of the diff section. */ - print_function(file0,first0); - - outfile.println(); - - Diff.change next = hunk; - int i = first0; - int j = first1; - - while (i <= last0 || j <= last1) { - - /* If the line isn't a difference, output the context from file 0. */ - - if (next == null || i < next.line0) { - outfile.print(' '); - print_1_line ("", file0[i++]); - j++; - } - else { - /* For each difference, first output the deleted part. */ - - int k = next.deleted; - while (k-- > 0) { - outfile.print('-'); - print_1_line ("", file0[i++]); - } - - /* Then output the inserted part. 
*/ - - k = next.inserted; - while (k-- > 0) { - outfile.print('+'); - print_1_line ("", file1[j++]); - } - - /* We're done with this hunk, so on to the next! */ - - next = next.link; - } - } - } - } - - - /** Read a text file into an array of String. This provides basic diff - functionality. A more advanced diff utility will use specialized - objects to represent the text lines, with options to, for example, - convert sequences of whitespace to a single space for comparison - purposes. - */ - static String[] slurp(String file) throws IOException { - BufferedReader rdr = new BufferedReader(new FileReader(file)); - Vector<String> s = new Vector<String>(); - for (;;) { - String line = rdr.readLine(); - if (line == null) break; - s.addElement(line); - } - String[] a = new String[s.size()]; - s.copyInto(a); - return a; - } - - public static void main(String[] argv) throws IOException { - String filea = argv[argv.length - 2]; - String fileb = argv[argv.length - 1]; - String[] a = slurp(filea); - String[] b = slurp(fileb); - Diff d = new Diff(a,b); - char style = 'n'; - for (int i = 0; i < argv.length - 2; ++i) { - String f = argv[i]; - if (f.startsWith("-")) { - for (int j = 1; j < f.length(); ++j) { - switch (f.charAt(j)) { - case 'e': // Ed style - style = 'e'; break; - case 'c': // Context diff - style = 'c'; break; - case 'u': - style = 'u'; break; - } - } - } - } - boolean reverse = style == 'e'; - Diff.change script = d.diff_2(reverse); - if (script == null) - System.err.println("No differences"); - else { - Base p; - Writer w = new OutputStreamWriter(System.out); - switch (style) { - case 'e': - p = new EdPrint(a,b,w); break; - case 'c': - p = new ContextPrint(a,b,w); break; - case 'u': - p = new UnifiedPrint(a,b,w); break; - default: - p = new NormalPrint(a,b,w); - } - p.print_header(filea,fileb); - p.print_script(script); - } - } - - public static void doDiff(String[] argv, Writer w) throws IOException { - String filea = argv[argv.length - 2]; - String fileb = 
argv[argv.length - 1]; - String[] a = slurp(filea); - String[] b = slurp(fileb); - Diff d = new Diff(a,b); - char style = 'n'; - for (int i = 0; i < argv.length - 2; ++i) { - String f = argv[i]; - if (f.startsWith("-")) { - for (int j = 1; j < f.length(); ++j) { - switch (f.charAt(j)) { - case 'e': // Ed style - style = 'e'; break; - case 'c': // Context diff - style = 'c'; break; - case 'u': - style = 'u'; break; - } - } - } - } - boolean reverse = style == 'e'; - Diff.change script = d.diff_2(reverse); - if (script == null) - w.write("No differences\n"); - else { - Base p; - switch (style) { - case 'e': - p = new EdPrint(a,b,w); break; - case 'c': - p = new ContextPrint(a,b,w); break; - case 'u': - p = new UnifiedPrint(a,b,w); break; - default: - p = new NormalPrint(a,b,w); - } - p.print_header(filea,fileb); - p.print_script(script); - } - } - -} diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala new file mode 100644 index 0000000000..650b6c35c8 --- /dev/null +++ b/src/partest/scala/tools/partest/nest/DirectCompiler.scala @@ -0,0 +1,105 @@ +/* NEST (New Scala Test) + * Copyright 2007-2013 LAMP/EPFL + * @author Philipp Haller + */ + +package scala.tools.partest +package nest + +import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError } +import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter } +import scala.tools.nsc.util.{ FakePos, stackTraceString } +import scala.tools.nsc.Properties.{ setProp, propOrEmpty } +import scala.reflect.io.AbstractFile +import scala.reflect.internal.util.Position +import java.io.{ BufferedReader, PrintWriter, FileReader, Writer, FileWriter } + +class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) { + shortname = true + // override def error(pos: Position, msg: String): Unit +} + +class TestSettings(cp: String, error: String => Unit) extends Settings(error) { + def 
this(cp: String) = this(cp, _ => ()) + + nowarnings.value = false + encoding.value = "UTF-8" + classpath.value = cp +} + +class PartestGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) { + // override def abort(msg: String): Nothing + // override def globalError(msg: String): Unit + // override def supplementErrorMessage(msg: String): String +} +class DirectCompiler(val fileManager: FileManager) { + def newGlobal(settings: Settings, reporter: Reporter): PartestGlobal = + new PartestGlobal(settings, reporter) + + def newGlobal(settings: Settings, logWriter: FileWriter): Global = + newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter))) + + def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB) + def newSettings(outdir: String): TestSettings = { + val cp = ClassPath.join(fileManager.LATEST_LIB, outdir) + val s = new TestSettings(cp) + s.outdir.value = outdir + s + } + + def compile(runner: Runner, opts0: List[String], sources: List[File]): TestState = { + import runner._ + + val testSettings = new TestSettings(ClassPath.join(fileManager.LATEST_LIB, outDir.getPath)) + val logWriter = new FileWriter(logFile) + val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile + val opts = fileManager.updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir) + val command = new CompilerCommand(opts, testSettings) + val global = newGlobal(testSettings, logWriter) + val reporter = global.reporter.asInstanceOf[ExtConsoleReporter] + def errorCount = reporter.ERROR.count + + def defineSettings(s: Settings) = { + s.outputDirs setSingleOutput outDir.getPath + // adding codelib.jar to the classpath + // codelib provides the possibility to override standard reify + // this shields the massive amount of reification tests from changes in the API + prependToClasspaths(s, codelib) + s.classpath append fileManager.CLASSPATH // adding this why? 
+ + // add the instrumented library version to classpath + if (kind == "specialized") + prependToClasspaths(s, speclib) + + // check that option processing succeeded + opts0.isEmpty || command.ok + } + + if (!defineSettings(testSettings)) + if (opts0.isEmpty) + reporter.error(null, s"bad settings: $testSettings") + else + reporter.error(null, opts0.mkString("bad options: ", space, "")) + + def ids = sources.map(_.testIdent) mkString space + vlog(s"% scalac $ids") + + def execCompile() = + if (command.shouldStopWithInfo) { + logWriter append (command getInfoMessage global) + runner genFail "compilation stopped with info" + } else { + new global.Run compile sources.map(_.getPath) + if (!reporter.hasErrors) runner.genPass() + else { + reporter.printSummary() + reporter.writer.close() + runner.genFail(s"compilation failed with $errorCount errors") + } + } + + try { execCompile() } + catch { case t: Throwable => reporter.error(null, t.getMessage) ; runner.genCrash(t) } + finally { logWriter.close() } + } +} diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala index 7e4c3b842c..49dd39c344 100644 --- a/src/partest/scala/tools/partest/nest/DirectRunner.scala +++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala @@ -3,14 +3,12 @@ * @author Philipp Haller */ -// $Id$ - package scala.tools.partest package nest import java.io.File import scala.util.Properties.setProp -import scala.tools.nsc.util.ScalaClassLoader +import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional } import scala.tools.nsc.io.Path import scala.collection.{ mutable, immutable } import java.util.concurrent._ @@ -22,41 +20,33 @@ trait DirectRunner { import PartestDefaults.numThreads - def denotesTestFile(arg: String) = Path(arg).hasExtension("scala", "res", "xml") - def denotesTestDir(arg: String) = Path(arg).ifDirectory(_.files.nonEmpty) exists (x => x) - def denotesTestPath(arg: String) = denotesTestDir(arg) || 
denotesTestFile(arg) - - /** No duplicate, no empty directories, don't mess with this unless - * you like partest hangs. - */ - def onlyValidTestPaths[T](args: List[T]): List[T] = { - args.distinct filter (arg => denotesTestPath("" + arg) || { - NestUI.warning("Discarding invalid test path '%s'\n" format arg) - false - }) - } - def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = { - System.setProperty("line.separator", "\n") + Thread.setDefaultUncaughtExceptionHandler( + new Thread.UncaughtExceptionHandler { + def uncaughtException(thread: Thread, t: Throwable) { + val t1 = Exceptional unwrap t + System.err.println(s"Uncaught exception on thread $thread: $t1") + t1.printStackTrace() + } + } + ) + def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = { - val allUrls = PathSettings.scalaCheck.toURL :: fileManager.latestUrls - val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(allUrls) - val kindFiles = onlyValidTestPaths(_kindFiles) - val pool = Executors.newFixedThreadPool(numThreads) - val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader)) - val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap + NestUI.resetTestNumber() - pool.shutdown() + val allUrls = PathSettings.scalaCheck.toURL :: fileManager.latestUrls + val parentClassLoader = ScalaClassLoader fromURLs allUrls + val pool = Executors newFixedThreadPool numThreads + val manager = new RunnerManager(kind, fileManager, TestRunParams(parentClassLoader)) + val futures = kindFiles map (f => pool submit callable(manager runTest f)) + pool.shutdown() try if (!pool.awaitTermination(4, TimeUnit.HOURS)) - NestUI.warning("Thread pool timeout elapsed before all tests were complete!") + NestUI warning "Thread pool timeout elapsed before all tests were complete!" 
catch { case t: InterruptedException => - NestUI.warning("Thread pool was interrupted") + NestUI warning "Thread pool was interrupted" t.printStackTrace() } - for ((file, future) <- futures) yield { - val state = if (future.isCancelled) TestState.Timeout else future.get - (file.getAbsolutePath, state) - } + futures map (_.get) } } diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index a32c56e973..25371b7d54 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -12,39 +12,36 @@ import java.io.{File, FilenameFilter, IOException, StringWriter, FileInputStream, FileOutputStream, BufferedReader, FileReader, PrintWriter, FileWriter} import java.net.URI -import scala.tools.nsc.io.{ Path, Directory, File => SFile } +import scala.reflect.io.AbstractFile import scala.collection.mutable trait FileUtil { /** - * Compares two files using a Java implementation of the GNU diff - * available at http://www.bmsi.com/java/#diff. + * Compares two files using difflib to produce a unified diff. 
* * @param f1 the first file to be compared * @param f2 the second file to be compared - * @return the text difference between the compared files + * @return the unified diff of the compared files or the empty string if they're equal */ def compareFiles(f1: File, f2: File): String = { - val diffWriter = new StringWriter - val args = Array(f1.getAbsolutePath(), f2.getAbsolutePath()) - - DiffPrint.doDiff(args, diffWriter) - val res = diffWriter.toString - if (res startsWith "No") "" else res + compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName) } - def compareContents(lines1: Seq[String], lines2: Seq[String]): String = { - val xs1 = lines1.toArray[AnyRef] - val xs2 = lines2.toArray[AnyRef] - - val diff = new Diff(xs1, xs2) - val change = diff.diff_2(false) - val writer = new StringWriter - val p = new DiffPrint.NormalPrint(xs1, xs2, writer) - - p.print_script(change) - val res = writer.toString - if (res startsWith "No ") "" - else res + + /** + * Compares two lists of lines using difflib to produce a unified diff. 
+ * + * @param origLines the first seq of lines to be compared + * @param newLines the second seq of lines to be compared + * @param origName file name to be used in unified diff for `origLines` + * @param newName file name to be used in unified diff for `newLines` + * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal + */ + def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = { + import collection.JavaConverters._ + + val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava) + if (diff.getDeltas.isEmpty) "" + else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n") } } object FileUtil extends FileUtil { } @@ -65,16 +62,19 @@ trait FileManager extends FileUtil { var LATEST_ACTORS: String protected def relativeToLibrary(what: String): String = { - if (LATEST_LIB endsWith ".jar") { - (SFile(LATEST_LIB).parent / s"scala-$what.jar").toAbsolute.path - } - else { + def jarname = if (what startsWith "scala") s"$what.jar" else s"scala-$what.jar" + if (LATEST_LIB endsWith ".jar") + (SFile(LATEST_LIB).parent / jarname).toAbsolute.path + else (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path - } } def latestScaladoc = relativeToLibrary("scaladoc") def latestInteractive = relativeToLibrary("interactive") - def latestPaths = List(LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS, latestScaladoc, latestInteractive) + def latestScalapFile = relativeToLibrary("scalap") + def latestPaths = List( + LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS, + latestScalapFile, latestScaladoc, latestInteractive + ) def latestFiles = latestPaths map (p => new java.io.File(p)) def latestUrls = latestFiles map (_.toURI.toURL) @@ -131,4 +131,34 @@ trait FileManager extends FileUtil { f.printlnAll(f.lines.toList map replace: _*) } + + /** Massage args to 
merge plugins and fix paths. + * Plugin path can be relative to test root, or cwd is out. + * While we're at it, mix in the baseline options, too. + * That's how ant passes in the plugins dir. + */ + def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): List[String] = { + val dir = testRootDir + // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual) + def pathOrCwd(p: String) = + if (p == ".") { + val plugxml = "scalac-plugin.xml" + val pout = if (out.isVirtual) Directory.makeTemp() else Path(out.path) + val srcpath = Path(srcdir.path) + val pd = (srcpath / plugxml).toFile + if (pd.exists) pd copyTo (pout / plugxml) + pout + } else Path(p) + def absolutize(path: String) = pathOrCwd(path) match { + case x if x.isAbsolute => x.path + case x => (dir / x).toAbsolute.path + } + + val xprefix = "-Xplugin:" + val (xplugs, others) = args partition (_ startsWith xprefix) + val Xplugin = if (xplugs.isEmpty) Nil else List(xprefix + + (xplugs map (_ stripPrefix xprefix) flatMap (_ split pathSeparator) map absolutize mkString pathSeparator) + ) + SCALAC_OPTS.toList ::: others ::: Xplugin + } } diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala index df90b22448..2e203bfd91 100644 --- a/src/partest/scala/tools/partest/nest/NestUI.scala +++ b/src/partest/scala/tools/partest/nest/NestUI.scala @@ -3,14 +3,37 @@ * @author Philipp Haller */ -// $Id$ - package scala.tools.partest package nest import java.io.PrintWriter +class Colors(enabled: => Boolean) { + import Console._ + + val bold = colored(BOLD) + val yellow = colored(YELLOW) + val green = colored(GREEN) + val blue = colored(BLUE) + val red = colored(RED) + val red_b = colored(RED_B) + val green_b = colored(GREEN_B) + val cyan = colored(CYAN) + val magenta = colored(MAGENTA) + + private def colored(code: String): String => String = + s => if (enabled) code + s + RESET 
else s +} + object NestUI { + private val testNum = new java.util.concurrent.atomic.AtomicInteger(1) + // @volatile private var testNumber = 1 + private def testNumber = "%3d" format testNum.getAndIncrement() + def resetTestNumber() = testNum set 1 + + var colorEnabled = sys.props contains "partest.colors" + val color = new Colors(colorEnabled) + import color._ val NONE = 0 val SOME = 1 @@ -22,11 +45,57 @@ object NestUI { private var _warning = "" private var _default = "" + private var dotCount = 0 + private val DotWidth = 72 + + def leftFlush() { + if (dotCount != 0) { + normal("\n") + dotCount = 0 + } + } + + def statusLine(state: TestState) = { + import state._ + val word = bold( + if (isSkipped) yellow("--") + else if (isOk) green("ok") + else red("!!") + ) + word + f" $testNumber%3s - $testIdent%-40s$reasonString" + } + + def reportTest(state: TestState) = { + if (isTerse && state.isOk) { + if (dotCount >= DotWidth) { + outline("\n.") + dotCount = 1 + } + else { + outline(".") + dotCount += 1 + } + } + else echo(statusLine(state)) + } + + def echo(message: String): Unit = synchronized { + leftFlush() + print(message + "\n") + } + def chatty(msg: String) = if (isVerbose) echo(msg) + + def echoSkipped(msg: String) = echo(yellow(msg)) + def echoPassed(msg: String) = echo(bold(green(msg))) + def echoFailed(msg: String) = echo(bold(red(msg))) + def echoMixed(msg: String) = echo(bold(yellow(msg))) + def echoWarning(msg: String) = echo(bold(red(msg))) + def initialize(number: Int) = number match { case MANY => _outline = Console.BOLD + Console.BLACK _success = Console.BOLD + Console.GREEN - _failure = Console.BOLD + Console.RED + _failure = Console.BOLD + Console.RED _warning = Console.BOLD + Console.YELLOW _default = Console.RESET case SOME => @@ -61,10 +130,7 @@ object NestUI { } def usage() { - println("Usage: NestRunner [<options>] [<testfile> ..] 
[<resfile>]") - println(" <testfile>: list of files ending in '.scala'") - println(" <resfile>: a file not ending in '.scala'") - println(" <options>:") + println("Usage: NestRunner [options] [test test ...]") println println(" Test categories:") println(" --all run all tests") @@ -74,16 +140,12 @@ object NestUI { println(" --jvm run JVM backend tests") println(" --res run resident compiler tests") println(" --scalacheck run ScalaCheck tests") - println(" --script run script runner tests") - println(" --shootout run shootout tests") println(" --instrumented run instrumented tests") println(" --presentation run presentation compiler tests") - println(" --grep <expr> run all tests whose source file contains <expr>") println println(" Other options:") println(" --pack pick compiler/reflect/library in build/pack, and run all tests") - println(" --show-log show log") - println(" --show-diff show diff between log and check file") + println(" --grep <expr> run all tests whose source file contains <expr>") println(" --failed run only those tests that failed during the last run") println(" --update-check instead of failing tests with output change, update checkfile. 
(Use with care!)") println(" --verbose show progress information") @@ -100,11 +162,28 @@ object NestUI { } var _verbose = false + var _debug = false + var _terse = false + def isVerbose = _verbose + def isDebug = _debug + def isTerse = _terse + + def setVerbose() { + _verbose = true + } + def setDebug() { + _debug = true + } + def setTerse() { + _terse = true + } def verbose(msg: String) { - if (_verbose) { - outline("debug: ") - println(msg) - } + if (isVerbose) + System.err.println(msg) + } + def debug(msg: String) { + if (isDebug) + System.err.println(msg) } } diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala index 02651c527b..7c005b4f61 100644 --- a/src/partest/scala/tools/partest/nest/PathSettings.scala +++ b/src/partest/scala/tools/partest/nest/PathSettings.scala @@ -71,6 +71,9 @@ object PathSettings { findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse { sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir)) } + + lazy val diffUtils: File = + findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.") } class PathSettings() { diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 05cae7b238..9780e82cd9 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -51,10 +51,13 @@ class ReflectiveRunner { new ConsoleFileManager // this is a workaround for https://issues.scala-lang.org/browse/SI-5433 - // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed + // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed // we hack into the classloader that will become parent classloader for scalac // this way we ensure that reflective macro lookup will pick correct 
Code.lift - val sepUrls = PathSettings.srcCodeLib.toURI.toURL :: fileManager.latestUrls + // it's also used to inject diffutils into the classpath when running partest from the test/partest script + val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils) + val sepUrls = srcCodeLibAndDiff.map(_.toURI.toURL) ::: fileManager.latestUrls + // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script val sepLoader = new URLClassLoader(sepUrls.toArray, null) if (isPartestDebug) diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala new file mode 100644 index 0000000000..fc56818bfc --- /dev/null +++ b/src/partest/scala/tools/partest/nest/Runner.scala @@ -0,0 +1,628 @@ +/* NEST (New Scala Test) + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ +package scala.tools.partest +package nest + +import java.io.{ Console => _, _ } +import java.net.URL +import scala.tools.nsc.Properties.{ jdkHome, javaHome, propOrElse, propOrEmpty } +import scala.util.Properties.{ envOrElse, isWin } +import scala.tools.nsc.{ Settings, CompilerCommand, Global } +import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile } +import scala.tools.nsc.reporters.ConsoleReporter +import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceString } +import ClassPath.{ join, split } +import scala.tools.scalap.scalax.rules.scalasig.ByteCode +import scala.collection.{ mutable, immutable } +import scala.sys.process.Process +import java.util.concurrent.{ Executors, TimeUnit, TimeoutException } +import PartestDefaults.{ javaCmd, javacCmd } +import scala.tools.scalap.Main.decompileScala + +trait PartestRunSettings { + def gitPath: Path + def reportPath: Path + def logPath: Path + + def testPaths: List[Path] + + def gitDiffOptions: List[String] + def extraScalacOptions: List[String] + def extraJavaOptions: 
List[String] +} + +class TestTranscript { + import NestUI.color._ + private val buf = mutable.ListBuffer[String]() + private def pass(s: String) = bold(green("% ")) + s + private def fail(s: String) = bold(red("% ")) + s + + def add(action: String): this.type = { buf += action ; this } + def append(text: String) { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) } + + // Colorize prompts according to pass/fail + def fail: List[String] = buf.toList match { + case Nil => Nil + case xs => (xs.init map pass) :+ fail(xs.last) + } +} + +class Runner(val testFile: File, fileManager: FileManager) { + import fileManager._ + + // Override to true to have the outcome of this test displayed + // whether it passes or not; in general only failures are reported, + // except for a . per passing test to show progress. + def isEnumeratedTest = false + + def testRunParams: TestRunParams = ??? + + private var _lastState: TestState = null + private var _transcript = new TestTranscript + + def lastState = if (_lastState == null) TestState.Uninitialized(testFile) else _lastState + def setLastState(s: TestState) = _lastState = s + def transcript: List[String] = _transcript.fail ++ logFile.fileLines + def pushTranscript(msg: String) = _transcript add msg + + val parentFile = testFile.getParentFile + val kind = parentFile.getName + val fileBase = basename(testFile.getName) + val logFile = new File(parentFile, s"$fileBase-$kind.log") + val outFile = logFile changeExtension "obj" + val checkFile = testFile changeExtension "check" + val flagsFile = testFile changeExtension "flags" + val testIdent = testFile.testIdent // e.g. 
pos/t1234 + + lazy val outDir = { outFile.mkdirs() ; outFile } + + type RanOneTest = (Boolean, LogContext) + + def showCrashInfo(t: Throwable) { + System.err.println("Crashed running test $testIdent: " + t) + if (!isPartestTerse) + System.err.println(stackTraceString(t)) + } + protected def crashHandler: PartialFunction[Throwable, TestState] = { + case t: InterruptedException => + genTimeout() + case t: Throwable => + showCrashInfo(t) + logFile.appendAll(stackTraceString(t)) + genCrash(t) + } + + def genPass() = TestState.Pass(testFile) + def genFail(reason: String) = TestState.Fail(testFile, reason, _transcript.fail) + def genTimeout() = TestState.Fail(testFile, "timed out", _transcript.fail) + def genCrash(caught: Throwable) = TestState.Crash(testFile, caught, _transcript.fail) + + def speclib = PathSettings.srcSpecLib.toString // specialization lib + def codelib = PathSettings.srcCodeLib.toString // reify lib + + // Prepend to a classpath, but without incurring duplicate entries + def prependTo(classpath: String, path: String): String = { + val segments = ClassPath split classpath + + if (segments startsWith path) classpath + else ClassPath.join(path :: segments distinct: _*) + } + + def prependToJavaClasspath(path: String) { + val jcp = sys.props.getOrElse("java.class.path", "") + prependTo(jcp, path) match { + case `jcp` => + case cp => sys.props("java.class.path") = cp + } + } + def prependToClasspaths(s: Settings, path: String) { + prependToJavaClasspath(path) + val scp = s.classpath.value + prependTo(scp, path) match { + case `scp` => + case cp => s.classpath.value = cp + } + } + + private def workerError(msg: String): Unit = System.err.println("Error: " + msg) + + def javac(files: List[File]): TestState = { + // compile using command-line javac compiler + val args = Seq( + javacCmd, + "-d", + outDir.getAbsolutePath, + "-classpath", + join(outDir.toString, CLASSPATH) + ) ++ files.map(_.getAbsolutePath) + + pushTranscript(args mkString " ") + val captured = 
StreamCapture(runCommand(args, logFile)) + if (captured.result) genPass() else { + logFile appendAll captured.stderr + genFail("java compilation failed") + } + } + + def testPrompt = kind match { + case "res" => "nsc> " + case _ => "% " + } + + def nextTestAction[T](body: => T)(failFn: PartialFunction[T, TestState]): T = { + val result = body + setLastState( if (failFn isDefinedAt result) failFn(result) else genPass() ) + result + } + def nextTestActionExpectTrue[T](reason: String, body: => Boolean): Boolean = { + nextTestAction(body) { case false => genFail(reason) } + } + + private def assembleTestCommand(outDir: File, logFile: File): List[String] = { + // check whether there is a ".javaopts" file + val argsFile = testFile changeExtension "javaopts" + val argString = file2String(argsFile) + if (argString != "") + NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString)) + + val testFullPath = testFile.getAbsolutePath + + // Note! As this currently functions, JAVA_OPTS must precede argString + // because when an option is repeated to java only the last one wins. + // That means until now all the .javaopts files were being ignored because + // they all attempt to change options which are also defined in + // partest.java_opts, leading to debug output like: + // + // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k' + // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...] 
+ val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil + val propertyOptions = List( + "-Dfile.encoding=UTF-8", + "-Djava.library.path="+logFile.getParentFile.getAbsolutePath, + "-Dpartest.output="+outDir.getAbsolutePath, + "-Dpartest.lib="+LATEST_LIB, + "-Dpartest.reflect="+LATEST_REFLECT, + "-Dpartest.cwd="+outDir.getParent, + "-Dpartest.test-path="+testFullPath, + "-Dpartest.testname="+fileBase, + "-Djavacmd="+javaCmd, + "-Djavaccmd="+javacCmd, + "-Duser.language=en", + "-Duser.country=US" + ) ++ extras + + val classpath = if (extraClasspath != "") join(extraClasspath, CLASSPATH) else CLASSPATH + + javaCmd +: ( + (JAVA_OPTS.split(' ') ++ extraJavaOptions.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "").toList ++ Seq( + "-classpath", + join(outDir.toString, classpath) + ) ++ propertyOptions ++ Seq( + "scala.tools.nsc.MainGenericRunner", + "-usejavacp", + "Test", + "jvm" + ) + ) + } + + /** Runs command redirecting standard out and + * error out to output file. + */ + private def runCommand(args: Seq[String], outFile: File): Boolean = { + (Process(args) #> outFile !) == 0 + } + + private def execTest(outDir: File, logFile: File): Boolean = { + val cmd = assembleTestCommand(outDir, logFile) + + pushTranscript(cmd.mkString(" \\\n ") + " > " + logFile.getName) + nextTestActionExpectTrue("non-zero exit code", runCommand(cmd, logFile)) || { + _transcript append logFile.fileContents + false + } + } + + override def toString = s"""Test($testIdent, lastState = $lastState)""" + + def newTestWriters() = { + val swr = new StringWriter + val wr = new PrintWriter(swr, true) + // diff = "" + + ((swr, wr)) + } + + def fail(what: Any) = { + NestUI.verbose("scalac: compilation of "+what+" failed\n") + false + } + + /** Filter the diff for conditional blocks. + * The check file can contain lines of the form: + * `#partest java7` + * where the line contains a conventional flag name. 
+ * In the diff output, these lines have the form: + * `> #partest java7` + * Blocks which don't apply are filtered out, + * and what remains is the desired diff. + * Line edit commands such as `0a1,6` don't count + * as diff, so return a nonempty diff only if + * material diff output was seen. + * Filtering the diff output (instead of every check + * file) means that we only post-process a test that + * might be failing, in the normal case. + */ + def diffilter(d: String) = { + import scala.util.Properties.javaVersion + val prefix = "#partest" + val margin = "> " + val leader = margin + prefix + // use lines in block so labeled? Default to sure, go ahead. + def retainOn(f: String) = f match { + case "java7" => javaVersion startsWith "1.7" + case "java6" => javaVersion startsWith "1.6" + case _ => true + } + if (d contains prefix) { + val sb = new StringBuilder + var retain = true // use the current line + var material = false // saw a line of diff + for (line <- d.lines) + if (line startsWith leader) { + val rest = (line stripPrefix leader).trim + retain = retainOn(rest) + } else if (retain) { + if (line startsWith margin) material = true + sb ++= line + sb ++= EOL + } + if (material) sb.toString else "" + } else d + } + + def currentDiff = ( + if (checkFile.canRead) diffilter(compareFiles(logFile, checkFile)) + else compareContents(augmentString(file2String(logFile)).lines.toList, Nil) + ) + + val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead) + val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options") + // --color=always --word-diff + + def gitDiff(f1: File, f2: File): Option[String] = { + try gitRunner map { git => + val cmd = s"$git diff $gitDiffOptions $f1 $f2" + val diff = Process(cmd).lines_!.drop(4).map(_ + "\n").mkString + + "\n" + diff + } + catch { case t: Exception => None } + } + + /** This does something about absolute paths and file separator + * chars 
before diffing. + */ + def normalizeLog() { + // squashing // in paths also munges line comments, so save this innovation for another time. + // (There are line comments in the "stub implementations" error output.) + //val slashes = """[/\\]+""".r + //def squashSlashes(s: String) = slashes replaceAllIn (s, "/") + def squashSlashes(s: String) = s replace ('\\', '/') + val base = squashSlashes(parentFile.getAbsolutePath + File.separator) + val quoted = """\Q%s\E""" format base + val baseless = (if (isWin) "(?i)" + quoted else quoted).r + def canonicalize(s: String) = baseless replaceAllIn (squashSlashes(s), "") + logFile mapInPlace canonicalize + } + + def diffIsOk: Boolean = { + val diff = currentDiff + val ok: Boolean = (diff == "") || { + fileManager.updateCheck && { + NestUI.verbose("Updating checkfile " + checkFile) + checkFile writeAll file2String(logFile) + true + } + } + pushTranscript(s"diff $logFile $checkFile") + nextTestAction(ok) { + case false => + // Get a word-highlighted diff from git if we can find it + val bestDiff = if (ok) "" else { + if (checkFile.canRead) + gitDiff(logFile, checkFile) getOrElse { + s"diff $logFile $checkFile\n$diff" + } + else diff + } + _transcript append bestDiff + genFail("output differs") + // TestState.fail("output differs", "output differs", + // genFail("output differs") + // TestState.Fail("output differs", bestDiff) + } + } + + /** 1. Creates log file and output directory. + * 2. Runs script function, providing log file and output directory as arguments. + */ + def runInContext(body: => Boolean): (Boolean, LogContext) = { + val (swr, wr) = newTestWriters() + val succeeded = body + (succeeded, LogContext(logFile, swr, wr)) + } + + /** Grouped files in group order, and lex order within each group. 
*/ + def groupedFiles(dir: File): List[List[File]] = { + val testFiles = dir.listFiles.toList filter (_.isJavaOrScala) + val grouped = testFiles groupBy (_.group) + grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName)) + } + + def newCompiler = new DirectCompiler(fileManager) + + def attemptCompile(sources: List[File]): TestState = { + val state = newCompiler.compile(this, flagsForCompilation(sources), sources) + if (!state.isOk) + _transcript append ("\n" + file2String(logFile)) + + state + } + + // snort or scarf all the contributing flags files + def flagsForCompilation(sources: List[File]): List[String] = { + def argsplitter(s: String) = words(s) filter (_.nonEmpty) + val perTest = argsplitter(flagsFile.fileContents) + val perGroup = if (testFile.isDirectory) { + sources flatMap { f => SFile(Path(f) changeExtension "flags").safeSlurp map argsplitter getOrElse Nil } + } else Nil + perTest ++ perGroup + } + + abstract class CompileRound { + def fs: List[File] + def result: TestState + def description: String + + def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " " + def isOk = result.isOk + def mkScalacString(): String = { + val flags = file2String(flagsFile) match { + case "" => "" + case s => " " + s + } + s"""scalac $fsString""" + } + override def toString = description + ( if (result.isOk) "" else "\n" + result.status ) + } + case class OnlyJava(fs: List[File]) extends CompileRound { + def description = s"""javac $fsString""" + lazy val result = { pushTranscript(description) ; javac(fs) } + } + case class OnlyScala(fs: List[File]) extends CompileRound { + def description = mkScalacString() + lazy val result = { pushTranscript(description) ; attemptCompile(fs) } + } + case class ScalaAndJava(fs: List[File]) extends CompileRound { + def description = mkScalacString() + lazy val result = { pushTranscript(description) ; attemptCompile(fs) } + } + + def compilationRounds(file: File): List[CompileRound] = { + val 
grouped = if (file.isDirectory) groupedFiles(file) else List(List(file)) + + (grouped map mixedCompileGroup).flatten + } + def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = { + val (scalaFiles, javaFiles) = allFiles partition (_.isScala) + val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty + val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles)) + val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles)) + val round3 = if (!isMixed) None else Some(OnlyScala(scalaFiles)) + + List(round1, round2, round3).flatten + } + + def runNegTest() = runInContext { + val rounds = compilationRounds(testFile) + + if (rounds forall (x => nextTestActionExpectTrue("compilation failed", x.isOk))) + nextTestActionExpectTrue("expected compilation failure", false) + else { + normalizeLog // put errors in a normal form + diffIsOk + } + } + + def runTestCommon(andAlso: => Boolean): (Boolean, LogContext) = runInContext { + compilationRounds(testFile).forall(x => nextTestActionExpectTrue("compilation failed", x.isOk)) && andAlso + } + + // Apache Ant 1.6 or newer + def ant(args: Seq[String], output: File): Boolean = { + val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/")) + val antLibDir = Directory(antDir / "lib") + val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path + val antOptions = + if (NestUI._verbose) List("-verbose", "-noinput") + else List("-noinput") + val cmd = javaCmd +: ( + JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq( + "-classpath", + antLauncherPath, + "org.apache.tools.ant.launch.Launcher" + ) ++ antOptions ++ args + ) + + runCommand(cmd, output) + } + + def runAntTest(): (Boolean, LogContext) = { + val (swr, wr) = newTestWriters() + + val succeeded = try { + val binary = "-Dbinary="+( + if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick" + else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack" + else if (fileManager.LATEST_LIB endsWith 
"dists/latest/lib/scala-library.jar/") "latest" + else "installed" + ) + val args = Array(binary, "-logfile", logFile.getPath, "-file", testFile.getPath) + NestUI.verbose("ant "+args.mkString(" ")) + + pushTranscript(s"ant ${args.mkString(" ")}") + nextTestActionExpectTrue("ant failed", ant(args, logFile)) && diffIsOk + } + catch { // *catch-all* + case e: Exception => + NestUI.warning("caught "+e) + false + } + + (succeeded, LogContext(logFile, swr, wr)) + } + + def extraClasspath = kind match { + case "specialized" => PathSettings.srcSpecLib.toString + case _ => "" + } + def extraJavaOptions = kind match { + case "instrumented" => "-javaagent:"+PathSettings.instrumentationAgentLib + case _ => "" + } + + def runScalacheckTest() = runTestCommon { + NestUI.verbose("compilation of "+testFile+" succeeded\n") + + val outURL = outDir.getAbsoluteFile.toURI.toURL + val logWriter = new PrintStream(new FileOutputStream(logFile), true) + + Output.withRedirected(logWriter) { + // this classloader is test specific: its parent contains library classes and others + ScalaClassLoader.fromURLs(List(outURL), testRunParams.scalaCheckParentClassLoader).run("Test", Nil) + } + + NestUI.verbose(file2String(logFile)) + // obviously this must be improved upon + val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer; + lines.forall(x => !x.startsWith("!")) || { + NestUI.normal("ScalaCheck test failed. Output:\n") + lines foreach (x => NestUI.normal(x + "\n")) + false + } + } + + def runResidentTest() = { + // simulate resident compiler loop + val prompt = "\nnsc> " + val (swr, wr) = newTestWriters() + + NestUI.verbose(this+" running test "+fileBase) + val dir = parentFile + val resFile = new File(dir, fileBase + ".res") + + // run compiler in resident mode + // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . 
"$@" + val sourcedir = logFile.getParentFile.getAbsoluteFile + val sourcepath = sourcedir.getAbsolutePath+File.separator + NestUI.verbose("sourcepath: "+sourcepath) + + val argList = List( + "-d", outDir.getAbsoluteFile.getPath, + "-Xresident", + "-sourcepath", sourcepath) + + // configure input/output files + val logOut = new FileOutputStream(logFile) + val logWriter = new PrintStream(logOut, true) + val resReader = new BufferedReader(new FileReader(resFile)) + val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true) + + // create compiler + val settings = new Settings(workerError) + settings.sourcepath.value = sourcepath + settings.classpath.value = fileManager.CLASSPATH + val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter) + val command = new CompilerCommand(argList, settings) + object compiler extends Global(command.settings, reporter) + + def resCompile(line: String): Boolean = { + // NestUI.verbose("compiling "+line) + val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath) + // NestUI.verbose("cmdArgs: "+cmdArgs) + val sett = new Settings(workerError) + sett.sourcepath.value = sourcepath + val command = new CompilerCommand(cmdArgs, sett) + // "scalac " + command.files.mkString(" ") + pushTranscript("scalac " + command.files.mkString(" ")) + nextTestActionExpectTrue( + "compilation failed", + command.ok && { + (new compiler.Run) compile command.files + !reporter.hasErrors + } + ) + } + def loop(): Boolean = { + logWriter.print(prompt) + resReader.readLine() match { + case null | "" => logWriter.close() ; true + case line => resCompile(line) && loop() + } + } + // res/t687.res depends on ignoring its compilation failure + // and just looking at the diff, so I made them all do that + // because this is long enough. 
+ if (!Output.withRedirected(logWriter)(try loop() finally resReader.close())) + setLastState(genPass()) + + normalizeLog // put errors in a normal form + (diffIsOk, LogContext(logFile, swr, wr)) + } + + def run(): TestState = { + if (kind == "neg" || (kind endsWith "-neg")) runNegTest() + else kind match { + case "pos" => runTestCommon(true) + case "ant" => runAntTest() + case "scalacheck" => runScalacheckTest() + case "res" => runResidentTest() + case "scalap" => runScalapTest() + case "script" => runScriptTest() + case _ => runTestCommon(execTest(outDir, logFile) && diffIsOk) + } + + lastState + } + + def runScalapTest() = runTestCommon { + val isPackageObject = testFile.getName startsWith "package" + val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package") + val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader) + val byteCode = ByteCode forClass (loader loadClass className) + val result = decompileScala(byteCode.bytes, isPackageObject) + + logFile writeAll result + diffIsOk + } + def runScriptTest() = { + import scala.sys.process._ + val (swr, wr) = newTestWriters() + + val args = file2String(testFile changeExtension "args") + val cmdFile = if (isWin) testFile changeExtension "bat" else testFile + val succeeded = (((cmdFile + " " + args) #> logFile !) 
== 0) && diffIsOk + + (succeeded, LogContext(logFile, swr, wr)) + } + + def cleanup() { + if (lastState.isOk) + logFile.delete() + if (!isPartestDebug) + Directory(outDir).deleteRecursively() + } +} diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index 8f28277a6c..1c689714c7 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -8,8 +8,6 @@ package nest import java.io._ import java.net.URL -import java.util.{ Timer, TimerTask } - import scala.tools.nsc.Properties.{ jdkHome, javaHome, propOrElse } import scala.util.Properties.{ envOrElse, isWin } import scala.tools.nsc.{ Settings, CompilerCommand, Global } @@ -22,6 +20,7 @@ import scala.collection.{ mutable, immutable } import scala.sys.process._ import java.util.concurrent.{ Executors, TimeUnit, TimeoutException } import PartestDefaults.{ javaCmd, javacCmd } +import scala.tools.scalap.Main.decompileScala class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)]) @@ -75,689 +74,22 @@ object Output { class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunParams) { import fileManager._ - - val compileMgr = new CompileManager(fileManager) fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck - - private def compareFiles(f1: File, f2: File): String = - try fileManager.compareFiles(f1, f2) - catch { case t: Exception => t.toString } - - /** This does something about absolute paths and file separator - * chars before diffing. 
- */ - private def replaceSlashes(dir: File, s: String): String = { - val base = (dir.getAbsolutePath + File.separator).replace('\\', '/') - var regex = """\Q%s\E""" format base - if (isWin) regex = "(?i)" + regex - s.replace('\\', '/').replaceAll(regex, "") - } - - private def workerError(msg: String): Unit = System.err.println("Error: " + msg) - - private def printInfoStart(file: File, printer: PrintWriter) { - NestUI.outline("testing: ", printer) - val filesdir = file.getAbsoluteFile.getParentFile.getParentFile - val testdir = filesdir.getParentFile - val totalWidth = 56 - val name = { - // 1. try with [...]/files/run/test.scala - val name = file.getAbsolutePath drop testdir.getAbsolutePath.length - if (name.length <= totalWidth) name - // 2. try with [...]/run/test.scala - else file.getAbsolutePath drop filesdir.getAbsolutePath.length - } - NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer) - } - - private def printInfoEnd(success: Boolean, printer: PrintWriter) { - NestUI.normal("[", printer) - if (success) NestUI.success(" OK ", printer) - else NestUI.failure("FAILED", printer) - NestUI.normal("]\n", printer) - } - - private def printInfoTimeout(printer: PrintWriter) { - NestUI.normal("[", printer) - NestUI.failure("TIMOUT", printer) - NestUI.normal("]\n", printer) - } - - private def javac(outDir: File, files: List[File], output: File): CompilationOutcome = { - // compile using command-line javac compiler - val args = Seq( - javacCmd, - "-d", - outDir.getAbsolutePath, - "-classpath", - join(outDir.toString, CLASSPATH) - ) ++ files.map("" + _) - - try if (runCommand(args, output)) CompileSuccess else CompileFailed - catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n", CompilerCrashed) - } - - /** Runs command redirecting standard out and error out to output file. - * Overloaded to accept a sequence of arguments. 
- */ - private def runCommand(args: Seq[String], outFile: File): Boolean = { - NestUI.verbose("running command:\n"+args.map(" " + _ + "\n").mkString) - runCommandImpl(Process(args), outFile) - } - - /** Runs command redirecting standard out and error out to output file. - * Overloaded to accept a single string = concatenated command + arguments. - */ - private def runCommand(command: String, outFile: File): Boolean = { - NestUI.verbose("running command:"+command) - runCommandImpl(Process(command), outFile) - } - - private def runCommandImpl(process: => ProcessBuilder, outFile: File): Boolean = { - val exitCode = (process #> outFile !) - // normalize line endings - // System.getProperty("line.separator") should be "\n" here - // so reading a file and writing it back should convert all CRLFs to LFs - SFile(outFile).printlnAll(SFile(outFile).lines.toList: _*) - exitCode == 0 - } - - @inline private def isJava(f: File) = SFile(f) hasExtension "java" - @inline private def isScala(f: File) = SFile(f) hasExtension "scala" - @inline private def isJavaOrScala(f: File) = isJava(f) || isScala(f) - - private def outputLogFile(logFile: File) { - val lines = SFile(logFile).lines - if (lines.nonEmpty) { - NestUI.normal("Log file '" + logFile + "': \n") - lines foreach (x => NestUI.normal(x + "\n")) - } - } - private def logStackTrace(logFile: File, t: Throwable, msg: String): Boolean = { - SFile(logFile).writeAll(msg, stackTraceString(t)) - outputLogFile(logFile) // if running the test threw an exception, output log file - false - } - - private def exHandler[T](logFile: File, msg: String, value: T): PartialFunction[Throwable, T] = { - case e: Exception => logStackTrace(logFile, e, msg) ; value - } - - class Runner(testFile: File) { - var testDiff: String = "" - var passed: Option[Boolean] = None - - val fileBase = basename(testFile.getName) - val logFile = fileManager.getLogFile(testFile, kind) - val parent = testFile.getParentFile - val outDir = new File(parent, 
"%s-%s.obj".format(fileBase, kind)) - def toDelete = if (isPartestDebug) Nil else List( - if (passed exists (x => x)) Some(logFile) else None, - if (outDir.isDirectory) Some(outDir) else None - ).flatten - - private def createOutputDir(): File = { - outDir.mkdirs() - outDir - } - - private def execTest(outDir: File, logFile: File, classpathPrefix: String = "", javaOpts: String = ""): Boolean = { - // check whether there is a ".javaopts" file - val argsFile = new File(logFile.getParentFile, fileBase + ".javaopts") - val argString = file2String(argsFile) - if (argString != "") - NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString)) - - val testFullPath = { - val d = new File(logFile.getParentFile, fileBase) - if (d.isDirectory) d.getAbsolutePath - else { - val f = new File(logFile.getParentFile, fileBase + ".scala") - if (f.isFile) f.getAbsolutePath - else "" - } - } - - // Note! As this currently functions, JAVA_OPTS must precede argString - // because when an option is repeated to java only the last one wins. - // That means until now all the .javaopts files were being ignored because - // they all attempt to change options which are also defined in - // partest.java_opts, leading to debug output like: - // - // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k' - // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...] 
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil - val propertyOptions = List( - "-Dfile.encoding=UTF-8", - "-Djava.library.path="+logFile.getParentFile.getAbsolutePath, - "-Dpartest.output="+outDir.getAbsolutePath, - "-Dpartest.lib="+LATEST_LIB, - "-Dpartest.reflect="+LATEST_REFLECT, - "-Dpartest.comp="+LATEST_COMP, - "-Dpartest.cwd="+outDir.getParent, - "-Dpartest.test-path="+testFullPath, - "-Dpartest.testname="+fileBase, - "-Djavacmd="+javaCmd, - "-Djavaccmd="+javacCmd, - "-Duser.language=en", - "-Duser.country=US" - ) ++ extras - - val classpath = if (classpathPrefix != "") join(classpathPrefix, CLASSPATH) else CLASSPATH - val cmd = javaCmd +: ( - (JAVA_OPTS.split(' ') ++ javaOpts.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "") ++ Seq( - "-classpath", - join(outDir.toString, classpath) - ) ++ propertyOptions ++ Seq( - "scala.tools.nsc.MainGenericRunner", - "-usejavacp", - "Test", - "jvm" - ) - ) - - runCommand(cmd, logFile) - } - - private def getCheckFilePath(dir: File, suffix: String) = { - def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile - - if (chkFile("").isFile || suffix == "") chkFile("") - else chkFile("-" + suffix) - } - - private def compareOutput(dir: File, logFile: File): String = { - val checkFile = getCheckFilePath(dir, kind) - val diff = - if (checkFile.canRead) compareFiles(logFile, checkFile.jfile) - else file2String(logFile) - - // if check file exists, compare with log file - if (diff != "" && fileManager.updateCheck) { - NestUI.verbose("Updating checkfile " + checkFile.jfile) - val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "") - toWrite writeAll file2String(logFile) - "" - } - else diff - } - - def newTestWriters() = { - val swr = new StringWriter - val wr = new PrintWriter(swr, true) - - ((swr, wr)) - } - - def diffCheck(testFile: File, diff: String) = { - testDiff = diff - testDiff == "" - } - - /** 1. 
Creates log file and output directory. - * 2. Runs script function, providing log file and output directory as arguments. - */ - def runInContext(file: File, script: (File, File) => Boolean): (Boolean, LogContext) = { - val (swr, wr) = newTestWriters() - printInfoStart(file, wr) - - NestUI.verbose(this+" running test "+fileBase) - val outDir = createOutputDir() - NestUI.verbose("output directory: "+outDir) - - // run test-specific code - val succeeded = try { - if (isPartestDebug) { - val (result, millis) = timed(script(logFile, outDir)) - fileManager.recordTestTiming(file.getPath, millis) - result - } - else script(logFile, outDir) - } - catch exHandler(logFile, "", false) - - (succeeded, LogContext(logFile, swr, wr)) - } - - def groupedFiles(dir: File): List[List[File]] = { - val testFiles = dir.listFiles.toList filter isJavaOrScala - - def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num) - val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted) - val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted - - noGroupSuffix :: groups filterNot (_.isEmpty) - } - - def compileFilesIn(dir: File, logFile: File, outDir: File): CompilationOutcome = { - def compileGroup(g: List[File]): CompilationOutcome = { - val (scalaFiles, javaFiles) = g partition isScala - val allFiles = javaFiles ++ scalaFiles - - /* The test can contain both java and scala files, each of which should be compiled with the corresponding - * compiler. Since the source files can reference each other both ways (java referencing scala classes and - * vice versa, the partest compilation routine attempts to reach a "bytecode fixpoint" between the two - * compilers -- that's when bytecode generated by each compiler implements the signatures expected by the other. - * - * In theory this property can't be guaranteed, as neither compiler can know what signatures the other - * compiler expects and how to implement them. 
(see SI-1240 for the full story) - * - * In practice, this happens in 3 steps: - * STEP1: Feed all the files to scalac if there are also non-Scala sources. - * It will parse java files and obtain their expected signatures and generate bytecode for scala files - * STEP2: Feed the java files to javac if there are any. - * It will generate the bytecode for the java files and link to the scalac-generated bytecode for scala - * STEP3: (Re-)compile the scala sources so they link to the correct - * java signatures, in case the signatures deduced by scalac from the source files were wrong. Since the - * bytecode for java is already in place, we only feed the scala files to scalac so it will take the - * java signatures from the existing javac-generated bytecode. - * Note that no artifacts are deleted before this step. - */ - List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) { - case (CompileSuccess, 1) if scalaFiles.nonEmpty && javaFiles.nonEmpty => - compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) - case (CompileSuccess, 2) if javaFiles.nonEmpty => - javac(outDir, javaFiles, logFile) - case (CompileSuccess, 3) if scalaFiles.nonEmpty => - // TODO: Do we actually need this? 
SI-1240 is known to require this, but we don't know if other tests - // require it: https://groups.google.com/forum/?fromgroups#!topic/scala-internals/rFDKAcOKciU - compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) - - case (outcome, _) => outcome - } - } - groupedFiles(dir).foldLeft(CompileSuccess: CompilationOutcome) { - case (CompileSuccess, files) => compileGroup(files) - case (outcome, _) => outcome - } - } - - def runTestCommon(file: File, expectFailure: Boolean)( - onSuccess: (File, File) => Boolean, - onFail: (File, File) => Unit = (_, _) => ()): (Boolean, LogContext) = - { - runInContext(file, (logFile: File, outDir: File) => { - val outcome = ( - if (file.isDirectory) compileFilesIn(file, logFile, outDir) - else compileMgr.attemptCompile(None, List(file), kind, logFile) - ) - val result = ( - if (expectFailure) outcome.isNegative - else outcome.isPositive - ) - - if (result) onSuccess(logFile, outDir) - else { onFail(logFile, outDir) ; false } - }) - } - - def runJvmTest(file: File): (Boolean, LogContext) = - runTestCommon(file, expectFailure = false)((logFile, outDir) => { - val dir = file.getParentFile - - // adding codelib.jar to the classpath - // codelib provides the possibility to override standard reify - // this shields the massive amount of reification tests from changes in the API - execTest(outDir, logFile, PathSettings.srcCodeLib.toString) && { - // cannot replace paths here since this also inverts slashes - // which affects a bunch of tests - //fileManager.mapFile(logFile, replaceSlashes(dir, _)) - diffCheck(file, compareOutput(dir, logFile)) - } - }) - - // Apache Ant 1.6 or newer - def ant(args: Seq[String], output: File): Boolean = { - val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/")) - val antLibDir = Directory(antDir / "lib") - val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path - val antOptions = - if (NestUI._verbose) List("-verbose", "-noinput") - else List("-noinput") - val cmd = javaCmd +: ( - 
JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq( - "-classpath", - antLauncherPath, - "org.apache.tools.ant.launch.Launcher" - ) ++ antOptions ++ args - ) - - try runCommand(cmd, output) - catch exHandler(output, "ant command '" + cmd + "' failed:\n", false) - } - - def runAntTest(file: File): (Boolean, LogContext) = { - val (swr, wr) = newTestWriters() - printInfoStart(file, wr) - - NestUI.verbose(this+" running test "+fileBase) - - val succeeded = try { - val binary = "-Dbinary="+( - if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick" - else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack" - else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest" - else "installed" - ) - val args = Array(binary, "-logfile", logFile.path, "-file", file.path) - NestUI.verbose("ant "+args.mkString(" ")) - ant(args, logFile) && diffCheck(file, compareOutput(file.getParentFile, logFile)) - } - catch { // *catch-all* - case e: Exception => - NestUI.verbose("caught "+e) - false - } - - (succeeded, LogContext(logFile, swr, wr)) - } - - def runSpecializedTest(file: File): (Boolean, LogContext) = - runTestCommon(file, expectFailure = false)((logFile, outDir) => { - val dir = file.getParentFile - - // adding the instrumented library to the classpath - ( execTest(outDir, logFile, PathSettings.srcSpecLib.toString) && - diffCheck(file, compareOutput(dir, logFile)) - ) - }) - - def runInstrumentedTest(file: File): (Boolean, LogContext) = - runTestCommon(file, expectFailure = false)((logFile, outDir) => { - val dir = file.getParentFile - - // adding the javagent option with path to instrumentation agent - execTest(outDir, logFile, javaOpts = "-javaagent:"+PathSettings.instrumentationAgentLib) && - diffCheck(file, compareOutput(dir, logFile)) - }) - - def processSingleFile(file: File): (Boolean, LogContext) = kind match { - case "scalacheck" => - val succFn: (File, File) => Boolean = { (logFile, outDir) 
=> - NestUI.verbose("compilation of "+file+" succeeded\n") - - val outURL = outDir.getAbsoluteFile.toURI.toURL - val logWriter = new PrintStream(new FileOutputStream(logFile), true) - - Output.withRedirected(logWriter) { - // this classloader is test specific: its parent contains library classes and others - ScalaClassLoader.fromURLs(List(outURL), params.scalaCheckParentClassLoader).run("Test", Nil) - } - - NestUI.verbose(file2String(logFile)) - // obviously this must be improved upon - val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer; - lines.forall(x => !x.startsWith("!")) || { - NestUI.normal("ScalaCheck test failed. Output:\n") - lines foreach (x => NestUI.normal(x + "\n")) - false - } - } - runTestCommon(file, expectFailure = false)( - succFn, - (logFile, outDir) => outputLogFile(logFile) - ) - - case "pos" => - runTestCommon(file, expectFailure = false)( - (logFile, outDir) => true, - (_, _) => () - ) - - case "neg" => - runTestCommon(file, expectFailure = true)((logFile, outDir) => { - // compare log file to check file - val dir = file.getParentFile - - // diff is contents of logFile - fileManager.mapFile(logFile, replaceSlashes(dir, _)) - diffCheck(file, compareOutput(dir, logFile)) - }) - - case "run" | "jvm" => - runJvmTest(file) - - case "specialized" => - runSpecializedTest(file) - - case "instrumented" => - runInstrumentedTest(file) - - case "presentation" => - runJvmTest(file) // for the moment, it's exactly the same as for a run test - - case "ant" => - runAntTest(file) - - case "res" => { - // simulate resident compiler loop - val prompt = "\nnsc> " - - val (swr, wr) = newTestWriters() - printInfoStart(file, wr) - - NestUI.verbose(this+" running test "+fileBase) - val dir = file.getParentFile - val outDir = createOutputDir() - val resFile = new File(dir, fileBase + ".res") - NestUI.verbose("outDir: "+outDir) - NestUI.verbose("logFile: "+logFile) - //NestUI.verbose("logFileErr: "+logFileErr) - NestUI.verbose("resFile: 
"+resFile) - - // run compiler in resident mode - // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@" - val sourcedir = logFile.getParentFile.getAbsoluteFile - val sourcepath = sourcedir.getAbsolutePath+File.separator - NestUI.verbose("sourcepath: "+sourcepath) - - val argList = List( - "-d", outDir.getAbsoluteFile.getPath, - "-Xresident", - "-sourcepath", sourcepath) - - // configure input/output files - val logOut = new FileOutputStream(logFile) - val logWriter = new PrintStream(logOut, true) - val resReader = new BufferedReader(new FileReader(resFile)) - val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true) - - // create compiler - val settings = new Settings(workerError) - settings.sourcepath.value = sourcepath - settings.classpath.value = fileManager.CLASSPATH - val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter) - val command = new CompilerCommand(argList, settings) - object compiler extends Global(command.settings, reporter) - - val resCompile = (line: String) => { - NestUI.verbose("compiling "+line) - val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath) - NestUI.verbose("cmdArgs: "+cmdArgs) - val sett = new Settings(workerError) - sett.sourcepath.value = sourcepath - val command = new CompilerCommand(cmdArgs, sett) - command.ok && { - (new compiler.Run) compile command.files - !reporter.hasErrors - } - } - - def loop(action: String => Boolean): Boolean = { - logWriter.print(prompt) - resReader.readLine() match { - case null | "" => logWriter.flush() ; true - case line => action(line) && loop(action) - } - } - - Output.withRedirected(logWriter) { - try loop(resCompile) - finally resReader.close() - } - fileManager.mapFile(logFile, replaceSlashes(dir, _)) - - (diffCheck(file, compareOutput(dir, logFile)), LogContext(logFile, swr, wr)) - } - - case "shootout" => - val (swr, wr) = newTestWriters() - printInfoStart(file, wr) - - NestUI.verbose(this+" running test 
"+fileBase) - val outDir = createOutputDir() - - // 2. define file {outDir}/test.scala that contains code to compile/run - val testFile = new File(outDir, "test.scala") - NestUI.verbose("outDir: "+outDir) - NestUI.verbose("logFile: "+logFile) - NestUI.verbose("testFile: "+testFile) - - // 3. cat {test}.scala.runner {test}.scala > testFile - val runnerFile = new File(parent, fileBase+".scala.runner") - val bodyFile = new File(parent, fileBase+".scala") - SFile(testFile).writeAll( - file2String(runnerFile), - file2String(bodyFile) - ) - - // 4. compile testFile - val ok = compileMgr.attemptCompile(None, List(testFile), kind, logFile) eq CompileSuccess - NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed")) - val result = ok && { - execTest(outDir, logFile) && { - NestUI.verbose(this+" finished running "+fileBase) - diffCheck(file, compareOutput(parent, logFile)) - } - } - - (result, LogContext(logFile, swr, wr)) - - case "scalap" => - runInContext(file, (logFile: File, outDir: File) => { - val sourceDir = Directory(if (file.isFile) file.getParent else file) - val sources = sourceDir.files filter (_ hasExtension "scala") map (_.jfile) toList - val results = sourceDir.files filter (_.name == "result.test") map (_.jfile) toList - - if (sources.length != 1 || results.length != 1) { - NestUI.warning("Misconfigured scalap test directory: " + sourceDir + " \n") - false - } - else { - val resFile = results.head - // 2. Compile source file - - if (!compileMgr.attemptCompile(Some(outDir), sources, kind, logFile).isPositive) { - NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir)) - false - } - else { - // 3. 
Decompile file and compare results - val isPackageObject = sourceDir.name startsWith "package" - val className = sourceDir.name.capitalize + (if (!isPackageObject) "" else ".package") - val url = outDir.toURI.toURL - val loader = ScalaClassLoader.fromURLs(List(url), this.getClass.getClassLoader) - val clazz = loader.loadClass(className) - - val byteCode = ByteCode.forClass(clazz) - val result = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject) - - SFile(logFile) writeAll result - diffCheck(file, compareFiles(logFile, resFile)) - } - } - }) - - case "script" => - val (swr, wr) = newTestWriters() - printInfoStart(file, wr) - - NestUI.verbose(this+" running test "+fileBase) - - // check whether there is an args file - val argsFile = new File(file.getParentFile, fileBase+".args") - NestUI.verbose("argsFile: "+argsFile) - val argString = file2String(argsFile) - val succeeded = try { - val cmdString = - if (isWin) { - val batchFile = new File(file.getParentFile, fileBase+".bat") - NestUI.verbose("batchFile: "+batchFile) - batchFile.getAbsolutePath - } - else file.getAbsolutePath - - val ok = runCommand(cmdString+argString, logFile) - ( ok && diffCheck(file, compareOutput(file.getParentFile, logFile)) ) - } - catch { case e: Exception => NestUI.verbose("caught "+e) ; false } - - (succeeded, LogContext(logFile, swr, wr)) - } - - private def crashContext(t: Throwable): LogContext = { - try { - logStackTrace(logFile, t, "Possible compiler crash during test of: " + testFile + "\n") - LogContext(logFile) - } - catch { case t: Throwable => LogContext(null) } - } - - def run(): (Boolean, LogContext) = { - val result = try processSingleFile(testFile) catch { case t: Throwable => (false, crashContext(t)) } - passed = Some(result._1) - result - } - - def reportResult(writers: Option[(StringWriter, PrintWriter)]) { - writers foreach { case (swr, wr) => - if (passed.isEmpty) printInfoTimeout(wr) - else printInfoEnd(passed.get, wr) - wr.flush() - swr.flush() - 
NestUI.normal(swr.toString) - - if (passed exists (x => !x)) { - if (fileManager.showDiff || isPartestDebug) - NestUI.normal(testDiff) - if (fileManager.showLog) - showLog(logFile) - } - } - toDelete foreach (_.deleteRecursively()) - } - } + fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath def runTest(testFile: File): TestState = { - val runner = new Runner(testFile) + val runner = new Runner(testFile, fileManager) { + override def testRunParams = params + } // when option "--failed" is provided execute test only if log // is present (which means it failed before) if (fileManager.failed && !runner.logFile.canRead) - return TestState.Ok - - val (success, ctx) = runner.run() - val state = if (success) TestState.Ok else TestState.Fail - - runner.reportResult(ctx.writers) - state - } - - private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] = - fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet - - private def copyTestFiles(testDir: File, destDir: File) { - val invalidExts = List("changes", "svn", "obj") - testDir.listFiles.toList filter ( - f => (isJavaOrScala(f) && f.isFile) || - (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach - { f => fileManager.copyFile(f, destDir) } - } - - private def showLog(logFile: File) { - file2String(logFile) match { - case "" if logFile.canRead => () - case "" => NestUI.failure("Couldn't open log file: " + logFile + "\n") - case s => NestUI.normal(s) + runner.genPass() + else { + val (state, elapsed) = timed(runner.run()) + NestUI.reportTest(state) + runner.cleanup() + state } } } diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala index 20f9c701d5..1cf3aa858f 100644 --- a/src/partest/scala/tools/partest/nest/SBTRunner.scala +++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala @@ -1,3 +1,6 @@ +/* NEST (New Scala Test) + * Copyright 
2007-2013 LAMP/EPFL + */ package scala.tools.partest package nest @@ -21,7 +24,7 @@ object SBTRunner extends DirectRunner { val testRootDir: Directory = Directory(testRootPath) } - def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.Map[String, TestState] = { + def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): java.util.List[TestState] = { def failedOnlyIfRequired(files:List[File]):List[File]={ if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files } @@ -33,7 +36,7 @@ object SBTRunner extends DirectRunner { scalacOptions: Seq[String] = Seq(), justFailedTests: Boolean = false) - def mainReflect(args: Array[String]): java.util.Map[String, String] = { + def mainReflect(args: Array[String]): java.util.List[TestState] = { setProp("partest.debug", "true") val Argument = new scala.util.matching.Regex("-(.*)") @@ -46,7 +49,7 @@ object SBTRunner extends DirectRunner { case x => sys.error("Unknown command line options: " + x) } val config = parseArgs(args, CommandLineOptions()) - fileManager.SCALAC_OPTS ++= config.scalacOptions + fileManager.SCALAC_OPTS = config.scalacOptions fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set") def findClasspath(jar: String, name: String): Option[String] = { @@ -67,22 +70,14 @@ object SBTRunner extends DirectRunner { // TODO - Make this a flag? //fileManager.updateCheck = true // Now run and report... 
- val runs = config.tests.filterNot(_._2.isEmpty) - (for { - (testType, files) <- runs - (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala - } yield (path, fixResult(result))).seq.asJava - } - def fixResult(result: TestState): String = result match { - case TestState.Ok => "OK" - case TestState.Fail => "FAIL" - case TestState.Timeout => "TIMEOUT" + val runs = config.tests.filterNot(_._2.isEmpty) + val result = runs.toList flatMap { case (kind, files) => reflectiveRunTestsForFiles(files, kind).asScala } + + result.asJava } + def main(args: Array[String]): Unit = { - val failures = ( - for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield - path + ( if (result == TestState.Fail) " [FAILED]" else " [TIMEOUT]" ) - ) + val failures = mainReflect(args).asScala collect { case s if !s.isOk => s.longStatus } // Re-list all failures so we can go figure out what went wrong. failures foreach System.err.println if(!failures.isEmpty) sys.exit(1) diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala new file mode 100644 index 0000000000..dc155b1787 --- /dev/null +++ b/src/partest/scala/tools/partest/nest/StreamCapture.scala @@ -0,0 +1,53 @@ +/* NEST (New Scala Test) + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ +package scala.tools.partest +package nest + +import java.io.{ Console => _, _ } + +object StreamCapture { + case class Captured[T](stdout: String, stderr: String, result: T) { + override def toString = s""" + |result: $result + |[stdout] + |$stdout + |[stderr] + |$stderr""".stripMargin.trim + } + + private def mkStream = { + val swr = new StringWriter + val wr = new PrintWriter(swr, true) + val ostream = new PrintStream(new OutputStream { def write(b: Int): Unit = wr write b }, true) // autoFlush = true + + (ostream, () => { ostream.close() ; swr.toString }) + } + + def savingSystem[T](body: => T): T = { + val savedOut = System.out 
+ val savedErr = System.err + try body + finally { + System setErr savedErr + System setOut savedOut + } + } + + def apply[T](body: => T): Captured[T] = { + val (outstream, stdoutFn) = mkStream + val (errstream, stderrFn) = mkStream + + val result = savingSystem { + System setOut outstream + System setErr errstream + Console.withOut(outstream) { + Console.withErr(errstream) { + body + } + } + } + Captured(stdoutFn(), stderrFn(), result) + } +} diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala deleted file mode 100644 index 880c6e431b..0000000000 --- a/src/partest/scala/tools/partest/nest/TestFile.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Philipp Haller - */ - -// $Id$ - -package scala.tools.partest -package nest - -import java.io.{ File => JFile } -import scala.tools.nsc.Settings -import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.io._ -import scala.util.Properties.{ propIsSet, propOrElse, setProp } - -trait TestFileCommon { - def file: JFile - def kind: String - - val dir = file.toAbsolute.parent - val fileBase = file.stripExtension - val flags = dir / (fileBase + ".flags") ifFile (f => f.slurp().trim) - - lazy val objectDir = dir / (fileBase + "-" + kind + ".obj") createDirectory true - def setOutDirTo = objectDir -} - -abstract class TestFile(val kind: String) extends TestFileCommon { - def file: JFile - def fileManager: FileManager - - def defineSettings(settings: Settings, setOutDir: Boolean) = { - settings.classpath append dir.path - if (setOutDir) - settings.outputDirs setSingleOutput setOutDirTo.path - - // adding codelib.jar to the classpath - // codelib provides the possibility to override standard reify - // this shields the massive amount of reification tests from changes in the API - settings.classpath prepend PathSettings.srcCodeLib.toString - if (propIsSet("java.class.path")) setProp("java.class.path", 
PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", "")) - - // have to catch bad flags somewhere - (flags forall (f => settings.processArgumentString(f)._1)) && { - settings.classpath append fileManager.CLASSPATH - true - } - } - - override def toString(): String = "%s %s".format(kind, file) -} - -case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos") -case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg") -case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run") -case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck") -case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm") -case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") { - override def setOutDirTo = file.parent -} -case class ScalapTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalap") { - override def setOutDirTo = file.parent -} -case class SpecializedTestFile(file: JFile, fileManager: FileManager) extends TestFile("specialized") { - override def defineSettings(settings: Settings, setOutDir: Boolean): Boolean = { - super.defineSettings(settings, setOutDir) && { - // add the instrumented library version to classpath - settings.classpath prepend PathSettings.srcSpecLib.toString - // @partest maintainer: if we use a custom Scala build (specified via --classpath) - // then the classes provided by it will come earlier than instrumented.jar in the resulting classpath - // this entire classpath business needs a thorough solution - if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcSpecLib.toString + ";" + propOrElse("java.class.path", "")) - true - } - } -} -case class PresentationTestFile(file: JFile, fileManager: FileManager) extends TestFile("presentation") -case class AntTestFile(file: JFile, fileManager: FileManager) extends 
TestFile("ant") -case class InstrumentedTestFile(file: JFile, fileManager: FileManager) extends TestFile("instrumented") diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index 2b2ce2e435..9e21b0f6ba 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -4,31 +4,102 @@ package scala.tools -import java.io.{ FileNotFoundException, File => JFile } -import nsc.io.{ Path, Directory, File => SFile } -import scala.tools.util.PathResolver -import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty } import scala.sys.process.javaVmArguments import java.util.concurrent.Callable +import scala.tools.partest.nest.NestUI +import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional } -package partest { - class TestState { } - object TestState { - val Ok = new TestState - val Fail = new TestState - val Timeout = new TestState +package object partest { + type File = java.io.File + type SFile = scala.reflect.io.File + type Directory = scala.reflect.io.Directory + type Path = scala.reflect.io.Path + type PathResolver = scala.tools.util.PathResolver + type ClassPath[T] = scala.tools.nsc.util.ClassPath[T] + type StringWriter = java.io.StringWriter + + val SFile = scala.reflect.io.File + val Directory = scala.reflect.io.Directory + val Path = scala.reflect.io.Path + val PathResolver = scala.tools.util.PathResolver + val ClassPath = scala.tools.nsc.util.ClassPath + + val space = "\u0020" + val EOL = scala.compat.Platform.EOL + def onull(s: String) = if (s == null) "" else s + def oempty(xs: String*) = xs filterNot (x => x == null || x == "") + def ojoin(xs: String*): String = oempty(xs: _*) mkString space + def nljoin(xs: String*): String = oempty(xs: _*) mkString EOL + + def setUncaughtHandler() = { + Thread.setDefaultUncaughtExceptionHandler( + new Thread.UncaughtExceptionHandler { + def uncaughtException(thread: Thread, t: Throwable) { + val t1 = Exceptional unwrap t + 
System.err.println(s"Uncaught exception on thread $thread: $t1") + t1.printStackTrace() + } + } + ) } -} -package object partest { - import nest.NestUI + /** Sources have a numerical group, specified by name_7 and so on. */ + private val GroupPattern = """.*_(\d+)""".r + + implicit class FileOps(val f: File) { + private def sf = SFile(f) + + def testIdent = { + f.toString split """[/\\]+""" takeRight 2 mkString "/" // e.g. pos/t1234 + } + + def mapInPlace(mapFn: String => String): Unit = + writeAll(fileLines.map(x => mapFn(x) + "\n"): _*) + + def appendAll(strings: String*): Unit = sf.appendAll(strings: _*) + def writeAll(strings: String*): Unit = sf.writeAll(strings: _*) + def absolutePathSegments: List[String] = f.getAbsolutePath split """[/\\]+""" toList + + def isJava = f.isFile && (sf hasExtension "java") + def isScala = f.isFile && (sf hasExtension "scala") + def isJavaOrScala = isJava || isScala + + def extension = sf.extension + def hasExtension(ext: String) = sf hasExtension ext + def changeExtension(ext: String): File = (sf changeExtension ext).jfile + + /** The group number for this source file, or -1 for no group. 
*/ + def group: Int = + sf.stripExtension match { + case GroupPattern(g) if g.toInt >= 0 => g.toInt + case _ => -1 + } + + def fileContents: String = try sf.slurp() catch { case _: java.io.FileNotFoundException => "" } + def fileLines: List[String] = augmentString(fileContents).lines.toList + } + + implicit class PathOps(p: Path) extends FileOps(p.jfile) { } + + implicit class Copier(val f: SFile) extends AnyVal { + def copyTo(dest: Path): Unit = dest.toFile writeAll f.slurp(scala.io.Codec.UTF8) + } - implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile - implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x) + implicit def temporaryPath2File(x: Path): File = x.jfile + implicit def stringPathToJavaFile(path: String): File = new File(path) implicit lazy val postfixOps = scala.language.postfixOps implicit lazy val implicitConversions = scala.language.implicitConversions + def fileSeparator = java.io.File.separator + def pathSeparator = java.io.File.pathSeparator + + def pathToTestIdent(path: Path) = path.jfile.testIdent + + def canonicalizeSlashes(line: String) = line.replaceAll("""[/\\]+""", "/") + + def words(s: String): List[String] = (s.trim split "\\s+").toList + def timed[T](body: => T): (T, Long) = { val t1 = System.currentTimeMillis val result = body @@ -39,15 +110,40 @@ package object partest { def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } - def file2String(f: JFile) = - try SFile(f).slurp(scala.io.Codec.UTF8) - catch { case _: FileNotFoundException => "" } + def file2String(f: File): String = f.fileContents def basename(name: String): String = Path(name).stripExtension - def resultsToStatistics(results: Iterable[(_, TestState)]): (Int, Int) = { - val (files, failures) = results map (_._2 == TestState.Ok) partition (_ == true) - (files.size, failures.size) + /** In order to allow for spaces in flags/options, this + * parses .flags, .javaopts, javacopts etc files as follows: 
+ * If it is exactly one line, it is split (naively) on spaces. + * If it contains more than one line, each line is its own + * token, spaces and all. + */ + def readOptionsFile(file: File): List[String] = { + file.fileLines match { + case x :: Nil => words(x) + case xs => xs map (_.trim) + } + } + + def findProgram(name: String): Option[File] = { + val pathDirs = sys.env("PATH") match { + case null => List("/usr/local/bin", "/usr/bin", "/bin") + case path => path split "[:;]" filterNot (_ == "") toList + } + pathDirs.iterator map (d => new File(d, name)) find (_.canExecute) + } + + def now = (new java.util.Date).toString + def elapsedString(millis: Long): String = { + val elapsedSecs = millis/1000 + val elapsedMins = elapsedSecs/60 + val elapsedHrs = elapsedMins/60 + val dispMins = elapsedMins - elapsedHrs * 60 + val dispSecs = elapsedSecs - elapsedMins * 60 + + "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs) } def vmArgString = javaVmArguments.mkString( @@ -62,13 +158,10 @@ package object partest { } def showAllJVMInfo() { - NestUI.verbose(vmArgString) - NestUI.verbose(allPropertiesString) + vlog(vmArgString) + vlog(allPropertiesString) } - def isPartestDebug: Boolean = - propOrEmpty("partest.debug") == "true" - import scala.language.experimental.macros /** @@ -117,4 +210,10 @@ package object partest { a.tree))))), a.tree)) } + + def isPartestTerse = NestUI.isTerse + def isPartestDebug = NestUI.isDebug + def isPartestVerbose = NestUI.isVerbose + + def vlog(msg: => String) = if (isPartestVerbose) System.err.println(msg) } diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala index 09eaf7afb4..f87e10c792 100644 --- a/src/reflect/scala/reflect/api/Annotations.scala +++ b/src/reflect/scala/reflect/api/Annotations.scala @@ -45,12 +45,6 @@ trait Annotations { self: Universe => */ type Annotation >: Null <: AnyRef with AnnotationApi - /** A tag that preserves the identity of the `Annotation` abstract type 
from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AnnotationTag: ClassTag[Annotation] - /** The constructor/extractor for `Annotation` instances. * @group Extractors */ @@ -90,11 +84,6 @@ trait Annotations { self: Universe => */ type JavaArgument >: Null <: AnyRef - /** A tag that preserves the identity of the `JavaArgument` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val JavaArgumentTag: ClassTag[JavaArgument] /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")` * @template @@ -102,12 +91,6 @@ trait Annotations { self: Universe => */ type LiteralArgument >: Null <: AnyRef with JavaArgument with LiteralArgumentApi - /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val LiteralArgumentTag: ClassTag[LiteralArgument] - /** The constructor/extractor for `LiteralArgument` instances. * @group Extractors */ @@ -137,12 +120,6 @@ trait Annotations { self: Universe => */ type ArrayArgument >: Null <: AnyRef with JavaArgument with ArrayArgumentApi - /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ArrayArgumentTag: ClassTag[ArrayArgument] - /** The constructor/extractor for `ArrayArgument` instances. * @group Extractors */ @@ -172,12 +149,6 @@ trait Annotations { self: Universe => */ type NestedArgument >: Null <: AnyRef with JavaArgument with NestedArgumentApi - /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val NestedArgumentTag: ClassTag[NestedArgument] - /** The constructor/extractor for `NestedArgument` instances. * @group Extractors */ @@ -200,4 +171,4 @@ trait Annotations { self: Universe => /** The underlying nested annotation. */ def annotation: Annotation } -}
\ No newline at end of file +} diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala index f3d75c3c00..0b7dd5582a 100644 --- a/src/reflect/scala/reflect/api/Constants.scala +++ b/src/reflect/scala/reflect/api/Constants.scala @@ -183,12 +183,6 @@ trait Constants { */ type Constant >: Null <: AnyRef with ConstantApi - /** A tag that preserves the identity of the `Constant` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ConstantTag: ClassTag[Constant] - /** The constructor/extractor for `Constant` instances. * @group Extractors */ diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index 4357aec9c9..712236cce1 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -61,12 +61,6 @@ trait FlagSets { self: Universe => */ type FlagSet - /** A tag that preserves the identity of the `FlagSet` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val FlagSetTag: ClassTag[FlagSet] - /** The API of `FlagSet` instances. * The main source of information about flag sets is the [[scala.reflect.api.FlagSets]] page. * @group Flags diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala index 3f377d6cff..fdc1d9017b 100644 --- a/src/reflect/scala/reflect/api/ImplicitTags.scala +++ b/src/reflect/scala/reflect/api/ImplicitTags.scala @@ -1,108 +1,116 @@ package scala.reflect package api +/** Tags which preserve the identity of abstract types in the face of erasure. + * Can be used for pattern matching, instance tests, serialization and the like. 
+ * @group Tags + */ trait ImplicitTags { - self: Types => + self: Universe => - /** A tag that preserves the identity of the `Type` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeTagg: ClassTag[Type] - - /** A tag that preserves the identity of the `SingletonType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SingletonTypeTag: ClassTag[SingletonType] - - /** A tag that preserves the identity of the `ThisType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ThisTypeTag: ClassTag[ThisType] - - /** A tag that preserves the identity of the `SingleType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SingleTypeTag: ClassTag[SingleType] - - /** A tag that preserves the identity of the `SuperType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SuperTypeTag: ClassTag[SuperType] - - /** A tag that preserves the identity of the `ConstantType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ConstantTypeTag: ClassTag[ConstantType] - - /** A tag that preserves the identity of the `TypeRef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeRefTag: ClassTag[TypeRef] - - /** A tag that preserves the identity of the `CompoundType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val CompoundTypeTag: ClassTag[CompoundType] - - /** A tag that preserves the identity of the `RefinedType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val RefinedTypeTag: ClassTag[RefinedType] - - /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ + // Tags for Types. + implicit val AnnotatedTypeTag: ClassTag[AnnotatedType] + implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType] implicit val ClassInfoTypeTag: ClassTag[ClassInfoType] - - /** A tag that preserves the identity of the `MethodType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ + implicit val CompoundTypeTag: ClassTag[CompoundType] + implicit val ConstantTypeTag: ClassTag[ConstantType] + implicit val ExistentialTypeTag: ClassTag[ExistentialType] implicit val MethodTypeTag: ClassTag[MethodType] - - /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType] - - /** A tag that preserves the identity of the `PolyType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ implicit val PolyTypeTag: ClassTag[PolyType] - - /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ExistentialTypeTag: ClassTag[ExistentialType] - - /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure. 
- * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AnnotatedTypeTag: ClassTag[AnnotatedType] - - /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ + implicit val RefinedTypeTag: ClassTag[RefinedType] + implicit val SingleTypeTag: ClassTag[SingleType] + implicit val SingletonTypeTag: ClassTag[SingletonType] + implicit val SuperTypeTag: ClassTag[SuperType] + implicit val ThisTypeTag: ClassTag[ThisType] implicit val TypeBoundsTag: ClassTag[TypeBounds] + implicit val TypeRefTag: ClassTag[TypeRef] + implicit val TypeTagg: ClassTag[Type] - /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType] + // Tags for Names. + implicit val NameTag: ClassTag[Name] + implicit val TermNameTag: ClassTag[TermName] + implicit val TypeNameTag: ClassTag[TypeName] + + // Tags for Scopes. + implicit val ScopeTag: ClassTag[Scope] + implicit val MemberScopeTag: ClassTag[MemberScope] + + // Tags for Annotations. + implicit val AnnotationTag: ClassTag[Annotation] + implicit val JavaArgumentTag: ClassTag[JavaArgument] + implicit val LiteralArgumentTag: ClassTag[LiteralArgument] + implicit val ArrayArgumentTag: ClassTag[ArrayArgument] + implicit val NestedArgumentTag: ClassTag[NestedArgument] + + // Tags for Symbols. 
+ implicit val TermSymbolTag: ClassTag[TermSymbol] + implicit val MethodSymbolTag: ClassTag[MethodSymbol] + implicit val SymbolTag: ClassTag[Symbol] + implicit val TypeSymbolTag: ClassTag[TypeSymbol] + implicit val ModuleSymbolTag: ClassTag[ModuleSymbol] + implicit val ClassSymbolTag: ClassTag[ClassSymbol] + implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol] + implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol] + + // Tags for misc Tree relatives. + implicit val PositionTag: ClassTag[Position] + implicit val ConstantTag: ClassTag[Constant] + implicit val FlagSetTag: ClassTag[FlagSet] + implicit val ModifiersTag: ClassTag[Modifiers] + + // Tags for Trees. WTF. + implicit val AlternativeTag: ClassTag[Alternative] + implicit val AnnotatedTag: ClassTag[Annotated] + implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree] + implicit val ApplyTag: ClassTag[Apply] + implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg] + implicit val AssignTag: ClassTag[Assign] + implicit val BindTag: ClassTag[Bind] + implicit val BlockTag: ClassTag[Block] + implicit val CaseDefTag: ClassTag[CaseDef] + implicit val ClassDefTag: ClassTag[ClassDef] + implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree] + implicit val DefDefTag: ClassTag[DefDef] + implicit val DefTreeTag: ClassTag[DefTree] + implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree] + implicit val FunctionTag: ClassTag[Function] + implicit val GenericApplyTag: ClassTag[GenericApply] + implicit val IdentTag: ClassTag[Ident] + implicit val IfTag: ClassTag[If] + implicit val ImplDefTag: ClassTag[ImplDef] + implicit val ImportSelectorTag: ClassTag[ImportSelector] + implicit val ImportTag: ClassTag[Import] + implicit val LabelDefTag: ClassTag[LabelDef] + implicit val LiteralTag: ClassTag[Literal] + implicit val MatchTag: ClassTag[Match] + implicit val MemberDefTag: ClassTag[MemberDef] + implicit val ModuleDefTag: ClassTag[ModuleDef] + implicit val NameTreeTag: ClassTag[NameTree] + implicit val 
NewTag: ClassTag[New] + implicit val PackageDefTag: ClassTag[PackageDef] + implicit val RefTreeTag: ClassTag[RefTree] + implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed] + implicit val ReturnTag: ClassTag[Return] + implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree] + implicit val SelectTag: ClassTag[Select] + implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree] + implicit val StarTag: ClassTag[Star] + implicit val SuperTag: ClassTag[Super] + implicit val SymTreeTag: ClassTag[SymTree] + implicit val TemplateTag: ClassTag[Template] + implicit val TermTreeTag: ClassTag[TermTree] + implicit val ThisTag: ClassTag[This] + implicit val ThrowTag: ClassTag[Throw] + implicit val TreeTag: ClassTag[Tree] + implicit val TryTag: ClassTag[Try] + implicit val TypTreeTag: ClassTag[TypTree] + implicit val TypeApplyTag: ClassTag[TypeApply] + implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree] + implicit val TypeDefTag: ClassTag[TypeDef] + implicit val TypeTreeTag: ClassTag[TypeTree] + implicit val TypedTag: ClassTag[Typed] + implicit val UnApplyTag: ClassTag[UnApply] + implicit val ValDefTag: ClassTag[ValDef] + implicit val ValOrDefDefTag: ClassTag[ValOrDefDef] } diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index 6290b88d33..e7840a13fb 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -43,34 +43,16 @@ trait Names { */ type Name >: Null <: NameApi - /** A tag that preserves the identity of the `Name` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val NameTag: ClassTag[Name] - /** The abstract type of names representing terms. * @group Names */ type TypeName >: Null <: Name - /** A tag that preserves the identity of the `TypeName` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val TypeNameTag: ClassTag[TypeName] - /** The abstract type of names representing types. * @group Names */ type TermName >: Null <: Name - /** A tag that preserves the identity of the `TermName` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TermNameTag: ClassTag[TermName] - /** The API of Name instances. * @group API */ diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala index 87f00fdb88..6edf8e13e4 100644 --- a/src/reflect/scala/reflect/api/Positions.scala +++ b/src/reflect/scala/reflect/api/Positions.scala @@ -19,13 +19,6 @@ trait Positions { * @group Positions */ type Position >: Null <: scala.reflect.api.Position { type Pos = Position } - - /** A tag that preserves the identity of the `Position` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val PositionTag: ClassTag[Position] - /** A special "missing" position. 
* @group Positions */ diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index d9e05e77c1..162fe1296b 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -163,6 +163,8 @@ trait Printers { self: Universe => import scala.language.implicitConversions implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value)) implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value) + import scala.reflect.internal.settings.MutableSettings + implicit def settingToBooleanFlag(setting: MutableSettings#BooleanSetting): BooleanFlag = BooleanFlag(Some(setting.value)) } /** @group Printers */ diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala index 7f9799393c..4bab6b6a04 100644 --- a/src/reflect/scala/reflect/api/Scopes.scala +++ b/src/reflect/scala/reflect/api/Scopes.scala @@ -33,12 +33,6 @@ trait Scopes { self: Universe => */ trait ScopeApi extends Iterable[Symbol] - /** A tag that preserves the identity of the `Scope` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ScopeTag: ClassTag[Scope] - /** Create a new scope with the given initial elements. * @group Scopes */ @@ -61,10 +55,4 @@ trait Scopes { self: Universe => */ def sorted: List[Symbol] } - - /** A tag that preserves the identity of the `MemberScope` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val MemberScopeTag: ClassTag[MemberScope] -}
\ No newline at end of file +} diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index dbad3dd478..7225919de5 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -61,12 +61,6 @@ trait Symbols { self: Universe => */ type Symbol >: Null <: SymbolApi - /** A tag that preserves the identity of the `Symbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SymbolTag: ClassTag[Symbol] - /** The type of type symbols representing type, class, and trait declarations, * as well as type parameters. * @group Symbols @@ -74,12 +68,6 @@ trait Symbols { self: Universe => */ type TypeSymbol >: Null <: Symbol with TypeSymbolApi - /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeSymbolTag: ClassTag[TypeSymbol] - /** The type of term symbols representing val, var, def, and object declarations as * well as packages and value parameters. * @group Symbols @@ -87,72 +75,36 @@ trait Symbols { self: Universe => */ type TermSymbol >: Null <: Symbol with TermSymbolApi - /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TermSymbolTag: ClassTag[TermSymbol] - /** The type of method symbols representing def declarations. * @group Symbols * @template */ type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi - /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val MethodSymbolTag: ClassTag[MethodSymbol] - /** The type of module symbols representing object declarations. * @group Symbols * @template */ type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi - /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ModuleSymbolTag: ClassTag[ModuleSymbol] - /** The type of class symbols representing class and trait definitions. * @group Symbols * @template */ type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi - /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ClassSymbolTag: ClassTag[ClassSymbol] - /** The type of free terms introduced by reification. * @group Symbols * @template */ type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi - /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol] - /** The type of free types introduced by reification. * @group Symbols * @template */ type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi - /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol] - /** A special "missing" symbol. Commonly used in the API to denote a default or empty value. 
* @group Symbols * @template diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 18985fe83d..99b5ef87f8 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -60,12 +60,6 @@ trait Trees { self: Universe => */ type Tree >: Null <: TreeApi - /** A tag that preserves the identity of the `Tree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TreeTag: ClassTag[Tree] - /** The API that all trees support. * The main source of information about trees is the [[scala.reflect.api.Trees]] page. * @group API @@ -230,12 +224,6 @@ trait Trees { self: Universe => */ type TermTree >: Null <: AnyRef with Tree with TermTreeApi - /** A tag that preserves the identity of the `TermTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TermTreeTag: ClassTag[TermTree] - /** The API that all term trees support * @group API */ @@ -249,12 +237,6 @@ trait Trees { self: Universe => */ type TypTree >: Null <: AnyRef with Tree with TypTreeApi - /** A tag that preserves the identity of the `TypTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypTreeTag: ClassTag[TypTree] - /** The API that all typ trees support * @group API */ @@ -267,12 +249,6 @@ trait Trees { self: Universe => */ type SymTree >: Null <: AnyRef with Tree with SymTreeApi - /** A tag that preserves the identity of the `SymTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val SymTreeTag: ClassTag[SymTree] - /** The API that all sym trees support * @group API */ @@ -287,12 +263,6 @@ trait Trees { self: Universe => */ type NameTree >: Null <: AnyRef with Tree with NameTreeApi - /** A tag that preserves the identity of the `NameTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val NameTreeTag: ClassTag[NameTree] - /** The API that all name trees support * @group API */ @@ -311,12 +281,6 @@ trait Trees { self: Universe => */ type RefTree >: Null <: SymTree with NameTree with RefTreeApi - /** A tag that preserves the identity of the `RefTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val RefTreeTag: ClassTag[RefTree] - /** The API that all ref trees support * @group API */ @@ -337,12 +301,6 @@ trait Trees { self: Universe => */ type DefTree >: Null <: SymTree with NameTree with DefTreeApi - /** A tag that preserves the identity of the `DefTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val DefTreeTag: ClassTag[DefTree] - /** The API that all def trees support * @group API */ @@ -358,12 +316,6 @@ trait Trees { self: Universe => */ type MemberDef >: Null <: DefTree with MemberDefApi - /** A tag that preserves the identity of the `MemberDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val MemberDefTag: ClassTag[MemberDef] - /** The API that all member defs support * @group API */ @@ -378,12 +330,6 @@ trait Trees { self: Universe => */ type PackageDef >: Null <: MemberDef with PackageDefApi - /** A tag that preserves the identity of the `PackageDef` abstract type from erasure. 
- * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val PackageDefTag: ClassTag[PackageDef] - /** The constructor/extractor for `PackageDef` instances. * @group Extractors */ @@ -417,12 +363,6 @@ trait Trees { self: Universe => */ type ImplDef >: Null <: MemberDef with ImplDefApi - /** A tag that preserves the identity of the `ImplDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ImplDefTag: ClassTag[ImplDef] - /** The API that all impl defs support * @group API */ @@ -437,12 +377,6 @@ trait Trees { self: Universe => */ type ClassDef >: Null <: ImplDef with ClassDefApi - /** A tag that preserves the identity of the `ClassDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ClassDefTag: ClassTag[ClassDef] - /** The constructor/extractor for `ClassDef` instances. * @group Extractors */ @@ -488,12 +422,6 @@ trait Trees { self: Universe => */ type ModuleDef >: Null <: ImplDef with ModuleDefApi - /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ModuleDefTag: ClassTag[ModuleDef] - /** The constructor/extractor for `ModuleDef` instances. * @group Extractors */ @@ -534,12 +462,6 @@ trait Trees { self: Universe => */ type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi - /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val ValOrDefDefTag: ClassTag[ValOrDefDef] - /** The API that all val defs and def defs support * @group API */ @@ -571,12 +493,6 @@ trait Trees { self: Universe => */ type ValDef >: Null <: ValOrDefDef with ValDefApi - /** A tag that preserves the identity of the `ValDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ValDefTag: ClassTag[ValDef] - /** The constructor/extractor for `ValDef` instances. * @group Extractors */ @@ -626,12 +542,6 @@ trait Trees { self: Universe => */ type DefDef >: Null <: ValOrDefDef with DefDefApi - /** A tag that preserves the identity of the `DefDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val DefDefTag: ClassTag[DefDef] - /** The constructor/extractor for `DefDef` instances. * @group Extractors */ @@ -681,12 +591,6 @@ trait Trees { self: Universe => */ type TypeDef >: Null <: MemberDef with TypeDefApi - /** A tag that preserves the identity of the `TypeDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeDefTag: ClassTag[TypeDef] - /** The constructor/extractor for `TypeDef` instances. * @group Extractors */ @@ -746,12 +650,6 @@ trait Trees { self: Universe => */ type LabelDef >: Null <: DefTree with TermTree with LabelDefApi - /** A tag that preserves the identity of the `LabelDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val LabelDefTag: ClassTag[LabelDef] - /** The constructor/extractor for `LabelDef` instances. 
* @group Extractors */ @@ -808,12 +706,6 @@ trait Trees { self: Universe => */ type ImportSelector >: Null <: AnyRef with ImportSelectorApi - /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ImportSelectorTag: ClassTag[ImportSelector] - /** The constructor/extractor for `ImportSelector` instances. * @group Extractors */ @@ -860,12 +752,6 @@ trait Trees { self: Universe => */ type Import >: Null <: SymTree with ImportApi - /** A tag that preserves the identity of the `Import` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ImportTag: ClassTag[Import] - /** The constructor/extractor for `Import` instances. * @group Extractors */ @@ -918,12 +804,6 @@ trait Trees { self: Universe => */ type Template >: Null <: SymTree with TemplateApi - /** A tag that preserves the identity of the `Template` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TemplateTag: ClassTag[Template] - /** The constructor/extractor for `Template` instances. * @group Extractors */ @@ -976,12 +856,6 @@ trait Trees { self: Universe => */ type Block >: Null <: TermTree with BlockApi - /** A tag that preserves the identity of the `Block` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val BlockTag: ClassTag[Block] - /** The constructor/extractor for `Block` instances. * @group Extractors */ @@ -1021,12 +895,6 @@ trait Trees { self: Universe => */ type CaseDef >: Null <: AnyRef with Tree with CaseDefApi - /** A tag that preserves the identity of the `CaseDef` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val CaseDefTag: ClassTag[CaseDef] - /** The constructor/extractor for `CaseDef` instances. * @group Extractors */ @@ -1074,12 +942,6 @@ trait Trees { self: Universe => */ type Alternative >: Null <: TermTree with AlternativeApi - /** A tag that preserves the identity of the `Alternative` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AlternativeTag: ClassTag[Alternative] - /** The constructor/extractor for `Alternative` instances. * @group Extractors */ @@ -1112,12 +974,6 @@ trait Trees { self: Universe => */ type Star >: Null <: TermTree with StarApi - /** A tag that preserves the identity of the `Star` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val StarTag: ClassTag[Star] - /** The constructor/extractor for `Star` instances. * @group Extractors */ @@ -1153,12 +1009,6 @@ trait Trees { self: Universe => */ type Bind >: Null <: DefTree with BindApi - /** A tag that preserves the identity of the `Bind` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val BindTag: ClassTag[Bind] - /** The constructor/extractor for `Bind` instances. * @group Extractors */ @@ -1222,12 +1072,6 @@ trait Trees { self: Universe => */ type UnApply >: Null <: TermTree with UnApplyApi - /** A tag that preserves the identity of the `UnApply` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val UnApplyTag: ClassTag[UnApply] - /** The constructor/extractor for `UnApply` instances. 
* @group Extractors */ @@ -1264,12 +1108,6 @@ trait Trees { self: Universe => */ type Function >: Null <: TermTree with SymTree with FunctionApi - /** A tag that preserves the identity of the `Function` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val FunctionTag: ClassTag[Function] - /** The constructor/extractor for `Function` instances. * @group Extractors */ @@ -1308,12 +1146,6 @@ trait Trees { self: Universe => */ type Assign >: Null <: TermTree with AssignApi - /** A tag that preserves the identity of the `Assign` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AssignTag: ClassTag[Assign] - /** The constructor/extractor for `Assign` instances. * @group Extractors */ @@ -1350,12 +1182,6 @@ trait Trees { self: Universe => */ type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi - /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg] - /** The constructor/extractor for `AssignOrNamedArg` instances. * @group Extractors */ @@ -1397,12 +1223,6 @@ trait Trees { self: Universe => */ type If >: Null <: TermTree with IfApi - /** A tag that preserves the identity of the `If` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val IfTag: ClassTag[If] - /** The constructor/extractor for `If` instances. * @group Extractors */ @@ -1454,12 +1274,6 @@ trait Trees { self: Universe => */ type Match >: Null <: TermTree with MatchApi - /** A tag that preserves the identity of the `Match` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val MatchTag: ClassTag[Match] - /** The constructor/extractor for `Match` instances. * @group Extractors */ @@ -1495,12 +1309,6 @@ trait Trees { self: Universe => */ type Return >: Null <: TermTree with SymTree with ReturnApi - /** A tag that preserves the identity of the `Return` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ReturnTag: ClassTag[Return] - /** The constructor/extractor for `Return` instances. * @group Extractors */ @@ -1533,12 +1341,6 @@ trait Trees { self: Universe => */ type Try >: Null <: TermTree with TryApi - /** A tag that preserves the identity of the `Try` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TryTag: ClassTag[Try] - /** The constructor/extractor for `Try` instances. * @group Extractors */ @@ -1577,12 +1379,6 @@ trait Trees { self: Universe => */ type Throw >: Null <: TermTree with ThrowApi - /** A tag that preserves the identity of the `Throw` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ThrowTag: ClassTag[Throw] - /** The constructor/extractor for `Throw` instances. * @group Extractors */ @@ -1613,12 +1409,6 @@ trait Trees { self: Universe => */ type New >: Null <: TermTree with NewApi - /** A tag that preserves the identity of the `New` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val NewTag: ClassTag[New] - /** The constructor/extractor for `New` instances. * @group Extractors */ @@ -1669,12 +1459,6 @@ trait Trees { self: Universe => */ type Typed >: Null <: TermTree with TypedApi - /** A tag that preserves the identity of the `Typed` abstract type from erasure. 
- * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypedTag: ClassTag[Typed] - /** The constructor/extractor for `Typed` instances. * @group Extractors */ @@ -1708,12 +1492,6 @@ trait Trees { self: Universe => */ type GenericApply >: Null <: TermTree with GenericApplyApi - /** A tag that preserves the identity of the `GenericApply` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val GenericApplyTag: ClassTag[GenericApply] - /** The API that all applies support * @group API */ @@ -1735,12 +1513,6 @@ trait Trees { self: Universe => */ type TypeApply >: Null <: GenericApply with TypeApplyApi - /** A tag that preserves the identity of the `TypeApply` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeApplyTag: ClassTag[TypeApply] - /** The constructor/extractor for `TypeApply` instances. * @group Extractors */ @@ -1779,12 +1551,6 @@ trait Trees { self: Universe => */ type Apply >: Null <: GenericApply with ApplyApi - /** A tag that preserves the identity of the `Apply` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ApplyTag: ClassTag[Apply] - /** The constructor/extractor for `Apply` instances. * @group Extractors */ @@ -1822,12 +1588,6 @@ trait Trees { self: Universe => */ type Super >: Null <: TermTree with SuperApi - /** A tag that preserves the identity of the `Super` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SuperTag: ClassTag[Super] - /** The constructor/extractor for `Super` instances. 
* @group Extractors */ @@ -1874,12 +1634,6 @@ trait Trees { self: Universe => */ type This >: Null <: TermTree with SymTree with ThisApi - /** A tag that preserves the identity of the `This` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ThisTag: ClassTag[This] - /** The constructor/extractor for `This` instances. * @group Extractors */ @@ -1915,12 +1669,6 @@ trait Trees { self: Universe => */ type Select >: Null <: RefTree with SelectApi - /** A tag that preserves the identity of the `Select` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SelectTag: ClassTag[Select] - /** The constructor/extractor for `Select` instances. * @group Extractors */ @@ -1960,12 +1708,6 @@ trait Trees { self: Universe => */ type Ident >: Null <: RefTree with IdentApi - /** A tag that preserves the identity of the `Ident` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val IdentTag: ClassTag[Ident] - /** The constructor/extractor for `Ident` instances. * @group Extractors */ @@ -2005,12 +1747,6 @@ trait Trees { self: Universe => */ type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi - /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed] - /** The constructor/extractor for `ReferenceToBoxed` instances. * @group Extractors */ @@ -2055,12 +1791,6 @@ trait Trees { self: Universe => */ type Literal >: Null <: TermTree with LiteralApi - /** A tag that preserves the identity of the `Literal` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val LiteralTag: ClassTag[Literal] - /** The constructor/extractor for `Literal` instances. * @group Extractors */ @@ -2094,12 +1824,6 @@ trait Trees { self: Universe => */ type Annotated >: Null <: AnyRef with Tree with AnnotatedApi - /** A tag that preserves the identity of the `Annotated` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AnnotatedTag: ClassTag[Annotated] - /** The constructor/extractor for `Annotated` instances. * @group Extractors */ @@ -2134,12 +1858,6 @@ trait Trees { self: Universe => */ type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi - /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree] - /** The constructor/extractor for `SingletonTypeTree` instances. * @group Extractors */ @@ -2170,12 +1888,6 @@ trait Trees { self: Universe => */ type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi - /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree] - /** The constructor/extractor for `SelectFromTypeTree` instances. * @group Extractors */ @@ -2217,12 +1929,6 @@ trait Trees { self: Universe => */ type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi - /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. 
- * @group Tags - */ - implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree] - /** The constructor/extractor for `CompoundTypeTree` instances. * @group Extractors */ @@ -2253,12 +1959,6 @@ trait Trees { self: Universe => */ type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi - /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree] - /** The constructor/extractor for `AppliedTypeTree` instances. * @group Extractors */ @@ -2301,12 +2001,6 @@ trait Trees { self: Universe => */ type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi - /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree] - /** The constructor/extractor for `TypeBoundsTree` instances. * @group Extractors */ @@ -2344,12 +2038,6 @@ trait Trees { self: Universe => */ type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi - /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree] - /** The constructor/extractor for `ExistentialTypeTree` instances. * @group Extractors */ @@ -2387,12 +2075,6 @@ trait Trees { self: Universe => */ type TypeTree >: Null <: TypTree with TypeTreeApi - /** A tag that preserves the identity of the `TypeTree` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeTreeTag: ClassTag[TypeTree] - /** The constructor/extractor for `TypeTree` instances. 
* @group Extractors */ @@ -2990,12 +2672,6 @@ trait Trees { self: Universe => */ type Modifiers >: Null <: AnyRef with ModifiersApi - /** A tag that preserves the identity of the `Modifiers` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Traversal - */ - implicit val ModifiersTag: ClassTag[Modifiers] - /** The API that all Modifiers support * @group API */ diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index e988971ace..7457910226 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -221,24 +221,7 @@ trait TypeTags { self: Universe => def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): WeakTypeTag[T] = - tpec1(mirror1) match { - case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]] - case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]] - case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]] - case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]] - case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]] - case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]] - case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]] - case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]] - case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]] - case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]] - case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]] - case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]] - case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]] - case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]] - case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]] - case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1) - } + new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1) def unapply[T](ttag: 
WeakTypeTag[T]): Option[Type] = Some(ttag.tpe) } @@ -299,24 +282,7 @@ trait TypeTags { self: Universe => val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null) def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] = - tpec1(mirror1) match { - case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]] - case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]] - case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]] - case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]] - case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]] - case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]] - case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]] - case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]] - case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]] - case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]] - case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]] - case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]] - case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]] - case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]] - case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]] - case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1) - } + new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1) def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe) } diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index e5140f23e5..1152f97350 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -50,7 +50,7 @@ package api * * @contentDiagram hideNodes "*Api" */ -trait Types extends ImplicitTags { +trait Types { self: Universe => /** The type of Scala types, and also Scala type signatures. 
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala index 4928b8bb38..16e533cf7b 100644 --- a/src/reflect/scala/reflect/api/Universe.scala +++ b/src/reflect/scala/reflect/api/Universe.scala @@ -68,6 +68,7 @@ abstract class Universe extends Symbols with Exprs with TypeTags with TagInterop + with ImplicitTags with StandardDefinitions with StandardNames with BuildUtils @@ -93,4 +94,4 @@ abstract class Universe extends Symbols // implementation is hardwired to `scala.reflect.reify.Taggers` // using the mechanism implemented in `scala.tools.reflect.FastTrack` def reify[T](expr: T): Expr[T] = ??? // macro -}
\ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index 78f7438429..faf61e5205 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -72,6 +72,9 @@ object ClassfileConstants { final val CONSTANT_METHODREF = 10 final val CONSTANT_INTFMETHODREF = 11 final val CONSTANT_NAMEANDTYPE = 12 + final val CONSTANT_METHODHANDLE = 15 + final val CONSTANT_METHODTYPE = 16 + final val CONSTANT_INVOKEDYNAMIC = 18 // tags describing the type of a literal in attribute values final val BYTE_TAG = 'B' @@ -306,7 +309,7 @@ object ClassfileConstants { final val invokespecial = 0xb7 final val invokestatic = 0xb8 final val invokeinterface = 0xb9 - final val xxxunusedxxxx = 0xba + final val invokedynamic = 0xba final val new_ = 0xbb final val newarray = 0xbc diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 281a32caf6..34bd400186 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -31,4 +31,81 @@ trait ExistentialsAndSkolems { } (new Deskolemizer).typeSkolems } + + def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type? + sym.isTypeParameter && sym.owner.isJavaDefined + + /** If we map a set of hidden symbols to their existential bounds, we + * have a problem: the bounds may themselves contain references to the + * hidden symbols. So this recursively calls existentialBound until + * the typeSymbol is not amongst the symbols being hidden. 
+ */ + private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = { + def safeBound(t: Type): Type = + if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t + + def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match { + case tp @ RefinedType(parents, decls) => + val parents1 = parents mapConserve safeBound + if (parents eq parents1) tp + else copyRefinedType(tp, parents1, decls) + case tp => tp + } + + // Hanging onto lower bound in case anything interesting + // happens with it. + mapFrom(hidden)(s => s.existentialBound match { + case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s)) + case _ => hiBound(s) + }) + } + + /** Given a set `rawSyms` of term- and type-symbols, and a type + * `tp`, produce a set of fresh type parameters and a type so that + * it can be abstracted to an existential type. Every type symbol + * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of + * type `T` in `rawSyms` is given an associated type symbol of the + * following form: + * + * type x.type <: T with Singleton + * + * The name of the type parameter is `x.type`, to produce nice + * diagnostics. The Singleton parent ensures that the type + * parameter is still seen as a stable type. Type symbols in + * rawSyms are fully replaced by the new symbols. Term symbols are + * also replaced, except for term symbols of an Ident tree, where + * only the type of the Ident is changed. 
+ */ + final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None)(creator: (List[Symbol], Type) => T): T = { + val allBounds = existentialBoundsExcludingHidden(rawSyms) + val typeParams: List[Symbol] = rawSyms map { sym => + val name = sym.name match { + case x: TypeName => x + case x => tpnme.singletonName(x) + } + def rawOwner0 = rawOwner.getOrElse(abort(s"no owner provided for existential transform over raw parameter: $sym")) + val bound = allBounds(sym) + val sowner = if (isRawParameter(sym)) rawOwner0 else sym.owner + val quantified = sowner.newExistential(name, sym.pos) + + quantified setInfo bound.cloneInfo(quantified) + } + // Higher-kinded existentials are not yet supported, but this is + // tpeHK for when they are: "if a type constructor is expected/allowed, + // tpeHK must be called instead of tpe." + val typeParamTypes = typeParams map (_.tpeHK) + def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes) + + creator(typeParams map (_ modifyInfo doSubst), doSubst(tp)) + } + + /** + * Compute an existential type from hidden symbols `hidden` and type `tp`. + * @param hidden The symbols that will be existentially abstracted + * @param hidden The original type + * @param rawOwner The owner for Java raw types. 
+ */ + final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None): Type = + if (hidden.isEmpty) tp + else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction) } diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala new file mode 100644 index 0000000000..3d1d1bf451 --- /dev/null +++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala @@ -0,0 +1,45 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ +package scala.reflect +package internal + +import java.lang.{ Class => jClass } +import java.lang.annotation.{ Annotation => jAnnotation } +import java.lang.reflect.{ + Member => jMember, Constructor => jConstructor, Method => jMethod, + AnnotatedElement => jAnnotatedElement, Type => jType, + TypeVariable => jTypeVariable +} + +/** This class tries to abstract over some of the duplication + * in java.lang.reflect.{ Method, Constructor }. 
+ */ +class JMethodOrConstructor(val member: jMember with jAnnotatedElement) { + def isVarArgs: Boolean = member match { + case m: jMethod => m.isVarArgs + case m: jConstructor[_] => m.isVarArgs + } + def typeParams: Array[_ <: jTypeVariable[_]] = member match { + case m: jMethod => m.getTypeParameters + case m: jConstructor[_] => m.getTypeParameters + } + def paramTypes: Array[jType] = member match { + case m: jMethod => m.getGenericParameterTypes + case m: jConstructor[_] => m.getGenericParameterTypes + } + def paramAnnotations: Array[Array[jAnnotation]] = member match { + case m: jMethod => m.getParameterAnnotations + case m: jConstructor[_] => m.getParameterAnnotations + } + def resultType: jType = member match { + case m: jMethod => m.getGenericReturnType + case m: jConstructor[_] => classOf[Unit] + } +} + +object JMethodOrConstructor { + implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor = new JMethodOrConstructor(m) + implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = new JMethodOrConstructor(m) +} diff --git a/src/reflect/scala/reflect/internal/JavaAccFlags.scala b/src/reflect/scala/reflect/internal/JavaAccFlags.scala new file mode 100644 index 0000000000..4be1f828d3 --- /dev/null +++ b/src/reflect/scala/reflect/internal/JavaAccFlags.scala @@ -0,0 +1,83 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ +package scala.reflect +package internal + +import java.lang.{ Class => jClass } +import java.lang.reflect.{ Member => jMember, Constructor => jConstructor, Field => jField, Method => jMethod } +import JavaAccFlags._ +import ClassfileConstants._ + +/** A value class which encodes the access_flags (JVMS 4.1) + * for a field, method, or class. The low 16 bits are the same + * as those returned by java.lang.reflect.Member#getModifiers + * and found in the bytecode. 
+ * + * The high bits encode whether the access flags are directly + * associated with a class, constructor, field, or method. + */ +final class JavaAccFlags private (val coded: Int) extends AnyVal { + private def has(mask: Int) = (flags & mask) != 0 + private def flagCarrierId = coded >>> 16 + private def flags = coded & 0xFFFF + + def isAbstract = has(JAVA_ACC_ABSTRACT) + def isAnnotation = has(JAVA_ACC_ANNOTATION) + def isBridge = has(JAVA_ACC_BRIDGE) + def isEnum = has(JAVA_ACC_ENUM) + def isFinal = has(JAVA_ACC_FINAL) + def isInterface = has(JAVA_ACC_INTERFACE) + def isNative = has(JAVA_ACC_NATIVE) + def isPrivate = has(JAVA_ACC_PRIVATE) + def isProtected = has(JAVA_ACC_PROTECTED) + def isPublic = has(JAVA_ACC_PUBLIC) + def isStatic = has(JAVA_ACC_STATIC) + def isStrictFp = has(JAVA_ACC_STRICT) + def isSuper = has(JAVA_ACC_SUPER) + def isSynchronized = has(JAVA_ACC_SYNCHRONIZED) + def isSynthetic = has(JAVA_ACC_SYNTHETIC) + def isTransient = has(JAVA_ACC_TRANSIENT) + def isVarargs = has(JAVA_ACC_VARARGS) + def isVolatile = has(JAVA_ACC_VOLATILE) + + /** Do these flags describe a member which has either protected or package access? + * Such access in java is encoded in scala as protected[foo] or private[foo], where + * `foo` is the defining package. 
+ */ + def hasPackageAccessBoundary = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PUBLIC) // equivalently, allows protected or package level access + def isPackageProtected = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC) + + def toJavaFlags: Int = flags + def toScalaFlags: Long = flagCarrierId match { + case Method | Constructor => FlagTranslation methodFlags flags + case Class => FlagTranslation classFlags flags + case _ => FlagTranslation fieldFlags flags + } +} + +object JavaAccFlags { + private val Unknown = 0 + private val Class = 1 + private val Field = 2 + private val Method = 3 + private val Constructor = 4 + + private def create(flagCarrier: Int, access_flags: Int): JavaAccFlags = + new JavaAccFlags((flagCarrier << 16) | (access_flags & 0xFFFF)) + + def classFlags(flags: Int): JavaAccFlags = create(Class, flags) + def methodFlags(flags: Int): JavaAccFlags = create(Method, flags) + def fieldFlags(flags: Int): JavaAccFlags = create(Field, flags) + def constructorFlags(flags: Int): JavaAccFlags = create(Constructor, flags) + + def apply(access_flags: Int): JavaAccFlags = create(Unknown, access_flags) + def apply(clazz: jClass[_]): JavaAccFlags = classFlags(clazz.getModifiers) + def apply(member: jMember): JavaAccFlags = member match { + case x: jConstructor[_] => constructorFlags(x.getModifiers) + case x: jMethod => methodFlags(x.getModifiers) + case x: jField => fieldFlags(x.getModifiers) + case _ => apply(member.getModifiers) + } +} diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index cc8dd16d69..3c49aef05a 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -145,7 +145,7 @@ trait Kinds { kindErrors = f(kindErrors) } - if (settings.debug.value) { + if (settings.debug) { log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner) log("checkKindBoundsHK supplied: "+ arg +" with params "+ 
hkargs +" from "+ owner) log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) @@ -201,7 +201,7 @@ trait Kinds { else NoKindErrors } - if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log( + if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log( "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")" ) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 63178d0b39..81d7619f22 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -46,7 +46,7 @@ trait Mirrors extends api.Mirrors { val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym if (result != NoSymbol) result else { - if (settings.debug.value) { log(sym.info); log(sym.info.members) }//debug + if (settings.debug) { log(sym.info); log(sym.info.members) }//debug thisMirror.missingHook(owner, name) orElse { MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror) } diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index cc32a0363f..3cf4f4a1df 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -107,7 +107,7 @@ trait Positions extends api.Positions { self: SymbolTable => def validate(tree: Tree, encltree: Tree): Unit = { if (!tree.isEmpty && tree.canHaveAttrs) { - if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value)) + if (settings.Yposdebug && (settings.verbose || settings.Yrangepos)) println("[%10s] %s".format("validate", treeStatus(tree, encltree))) if (!tree.pos.isDefined) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index e1ef6d6365..5b80889225 100644 --- 
a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -169,7 +169,7 @@ trait Printers extends api.Printers { self: SymbolTable => ) def printFlags(flags: Long, privateWithin: String) { - val mask: Long = if (settings.debug.value) -1L else PrintableFlags + val mask: Long = if (settings.debug) -1L else PrintableFlags val s = flagsToString(flags & mask, privateWithin) if (s != "") print(s + " ") } @@ -376,7 +376,7 @@ trait Printers extends api.Printers { self: SymbolTable => if (!qual.isEmpty) print(symName(tree, qual) + ".") print("this") - case Select(qual @ New(tpe), name) if (!settings.debug.value) => + case Select(qual @ New(tpe), name) if !settings.debug => print(qual) case Select(qualifier, name) => diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala index 9b99b94b41..5646ac82ae 100644 --- a/src/reflect/scala/reflect/internal/PrivateWithin.scala +++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala @@ -1,23 +1,26 @@ package scala.reflect package internal -import ClassfileConstants._ +import java.lang.{ Class => jClass } +import java.lang.reflect.{ Member => jMember } trait PrivateWithin { self: SymbolTable => - def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = { - if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0) - // See ticket #1687 for an example of when topLevelClass is NoSymbol: it - // apparently occurs when processing v45.3 bytecode. - if (sym.enclosingTopLevelClass != NoSymbol) - sym.privateWithin = sym.enclosingTopLevelClass.owner - - // protected in java means package protected. 
#3946 - if ((jflags & JAVA_ACC_PROTECTED) != 0) - if (sym.enclosingTopLevelClass != NoSymbol) - sym.privateWithin = sym.enclosingTopLevelClass.owner - - sym + def propagatePackageBoundary(c: jClass[_], syms: Symbol*): Unit = + propagatePackageBoundary(JavaAccFlags(c), syms: _*) + def propagatePackageBoundary(m: jMember, syms: Symbol*): Unit = + propagatePackageBoundary(JavaAccFlags(m), syms: _*) + def propagatePackageBoundary(jflags: JavaAccFlags, syms: Symbol*) { + if (jflags.hasPackageAccessBoundary) + syms foreach setPackageAccessBoundary } -}
\ No newline at end of file + + // protected in java means package protected. #3946 + // See ticket #1687 for an example of when the enclosing top level class is NoSymbol; + // it apparently occurs when processing v45.3 bytecode. + def setPackageAccessBoundary(sym: Symbol): Symbol = ( + if (sym.enclosingTopLevelClass eq NoSymbol) sym + else sym setPrivateWithin sym.enclosingTopLevelClass.owner + ) +} diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 850c497d4b..371eddbc4f 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -6,6 +6,8 @@ package scala.reflect package internal +import scala.annotation.tailrec + trait Scopes extends api.Scopes { self: SymbolTable => /** An ADT to represent the results of symbol name lookups. @@ -65,6 +67,11 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** a cache for all elements, to be used by symbol iterator. */ private var elemsCache: List[Symbol] = null + private var cachedSize = -1 + private def flushElemsCache() { + elemsCache = null + cachedSize = -1 + } /** size and mask of hash tables * todo: make hashtables grow? 
@@ -86,6 +93,12 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** the number of entries in this scope */ override def size: Int = { + if (cachedSize < 0) + cachedSize = directSize + + cachedSize + } + private def directSize: Int = { var s = 0 var e = elems while (e ne null) { @@ -98,7 +111,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** enter a scope entry */ protected def enterEntry(e: ScopeEntry) { - elemsCache = null + flushElemsCache() if (hashtable ne null) enterInHash(e) else if (size >= MIN_HASH) @@ -192,7 +205,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => e1.tail = e.tail } } - elemsCache = null + flushElemsCache() } /** remove symbol */ @@ -304,16 +317,43 @@ trait Scopes extends api.Scopes { self: SymbolTable => e } + /** TODO - we can test this more efficiently than checking isSubScope + * in both directions. However the size test might be enough to quickly + * rule out most failures. + */ + def isSameScope(other: Scope) = ( + (size == other.size) // optimization - size is cached + && (this isSubScope other) + && (other isSubScope this) + ) + + def isSubScope(other: Scope) = { + def scopeContainsSym(sym: Symbol): Boolean = { + @tailrec def entryContainsSym(e: ScopeEntry): Boolean = e match { + case null => false + case _ => + val comparableInfo = sym.info.substThis(sym.owner, e.sym.owner) + (e.sym.info =:= comparableInfo) || entryContainsSym(lookupNextEntry(e)) + } + entryContainsSym(this lookupEntry sym.name) + } + other.toList forall scopeContainsSym + } + /** Return all symbols as a list in the order they were entered in this scope. 
*/ override def toList: List[Symbol] = { if (elemsCache eq null) { - elemsCache = Nil + var symbols: List[Symbol] = Nil + var count = 0 var e = elems while ((e ne null) && e.owner == this) { - elemsCache = e.sym :: elemsCache + count += 1 + symbols ::= e.sym e = e.next } + elemsCache = symbols + cachedSize = count } elemsCache } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 4fd86aa8b1..ae2cf09c2e 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -359,8 +359,8 @@ trait StdNames { * be sure to retain the extra dollars. */ def unexpandedName(name: Name): Name = name lastIndexOf "$$" match { - case -1 => name - case idx0 => + case 0 | -1 => name + case idx0 => // Sketchville - We've found $$ but if it's part of $$$ or $$$$ // or something we need to keep the bonus dollars, so e.g. foo$$$outer // has an original name of $outer. diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 03ec59f0fe..336c2748c6 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -60,8 +60,8 @@ abstract class SymbolTable extends macros.Universe def debugwarn(msg: => String): Unit = devWarning(msg) /** Override with final implementation for inlining. */ - def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg) - def devWarning(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg) + def debuglog(msg: => String): Unit = if (settings.debug) log(msg) + def devWarning(msg: => String): Unit = if (settings.debug) Console.err.println(msg) def throwableAsString(t: Throwable): String = "" + t /** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. 
*/ diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 037d44c540..a87d002f25 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -190,7 +190,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def varianceString: String = variance.symbolicString override def flagMask = - if (settings.debug.value && !isAbstractType) AllFlags + if (settings.debug && !isAbstractType) AllFlags else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE else ExplicitFlags @@ -202,7 +202,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def shortSymbolClass = shortClassOfInstance(this) def symbolCreationString: String = ( "%s%25s | %-40s | %s".format( - if (settings.uniqid.value) "%06d | ".format(id) else "", + if (settings.uniqid) "%06d | ".format(id) else "", shortSymbolClass, name.decode + " in " + owner, rawFlagString @@ -828,7 +828,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Is this symbol effectively final? I.e, it cannot be overridden */ final def isEffectivelyFinal: Boolean = ( (this hasFlag FINAL | PACKAGE) - || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects.value) + || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects) || isTerm && ( isPrivate || isLocal @@ -887,9 +887,23 @@ trait Symbols extends api.Symbols { self: SymbolTable => supersym == NoSymbol || supersym.isIncompleteIn(base) } - // Does not always work if the rawInfo is a SourcefileLoader, see comment - // in "def coreClassesFirst" in Global. 
- def exists = !isTopLevel || { rawInfo.load(this); rawInfo != NoType } + def exists: Boolean = !isTopLevel || { + val isSourceLoader = rawInfo match { + case sl: SymLoader => sl.fromSource + case _ => false + } + def warnIfSourceLoader() { + if (isSourceLoader) + // Predef is completed early due to its autoimport; we used to get here when type checking its + // parent LowPriorityImplicits. See comment in c5441dc for more elaboration. + // Since the fix for SI-7335 Predef parents must be defined in Predef.scala, and we should not + // get here anymore. + devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results."); + } + + rawInfo load this + rawInfo != NoType || { warnIfSourceLoader(); false } + } final def isInitialized: Boolean = validTo != NoPeriod diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index b1f58814c7..3296353b6b 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -663,20 +663,9 @@ abstract class TreeInfo { unapply(dissectApplied(tree)) } - /** Does list of trees start with a definition of - * a class of module with given name (ignoring imports) - */ - def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match { - case Import(_, _) :: xs => firstDefinesClassOrObject(xs, name) - case Annotated(_, tree1) :: Nil => firstDefinesClassOrObject(List(tree1), name) - case ModuleDef(_, `name`, _) :: Nil => true - case ClassDef(_, `name`, _, _) :: Nil => true - case _ => false - } - - /** Is this file the body of a compilation unit which should not - * have Predef imported? + * have Predef imported? This is the case iff the first import in the + * unit explicitly refers to Predef. */ def noPredefImportForUnit(body: Tree) = { // Top-level definition whose leading imports include Predef. 
@@ -685,13 +674,7 @@ abstract class TreeInfo { case Import(expr, _) => isReferenceToPredef(expr) case _ => false } - // Compilation unit is class or object 'name' in package 'scala' - def isUnitInScala(tree: Tree, name: Name) = tree match { - case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name) - case _ => false - } - - isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body) + isLeadingPredefImport(body) } def isAbsTypeDef(tree: Tree) = tree match { diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 410bc738e2..7467ccc6b9 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1491,6 +1491,11 @@ trait Trees extends api.Trees { self: SymbolTable => /** Substitute symbols in `from` with symbols in `to`. Returns a new * tree using the new symbols and whose Ident and Select nodes are * name-consistent with the new symbols. + * + * Note: This is currently a destructive operation on the original Tree. + * Trees currently assigned a symbol in `from` will be assigned the new symbols + * without copying, and trees that define symbols with an `info` that refer + * a symbol in `from` will have a new type assigned. */ class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer { val symSubst = new SubstSymMap(from, to) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index d6eeb68452..60d9e1c3cd 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -871,7 +871,7 @@ trait Types def baseTypeSeqDepth: Int = 1 /** The list of all baseclasses of this type (including its own typeSymbol) - * in reverse linearization order, starting with the class itself and ending + * in linearization order, starting with the class itself and ending * in class Any. 
*/ def baseClasses: List[Symbol] = List() @@ -1306,7 +1306,7 @@ trait Types override def isVolatile = false override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded override def prefixString = - if (settings.debug.value) sym.nameString + ".this." + if (settings.debug) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." else if (sym.isOmittablePrefix) "" else if (sym.isModuleClass) sym.fullNameString + "." @@ -1522,7 +1522,7 @@ trait Types typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement) override def safeToString: String = parentsString(parents) + ( - (if (settings.debug.value || parents.isEmpty || (decls.elems ne null)) + (if (settings.debug || parents.isEmpty || (decls.elems ne null)) fullyInitializeScope(decls).mkString("{", "; ", "}") else "") ) } @@ -1622,7 +1622,7 @@ trait Types object baseClassesCycleMonitor { private var open: List[Symbol] = Nil @inline private def cycleLog(msg: => String) { - if (settings.debug.value) + if (settings.debug) Console.err.println(msg) } def size = open.size @@ -1868,7 +1868,7 @@ trait Types tp match { case tr @ TypeRef(_, sym, args) if args.nonEmpty => val tparams = tr.initializedTypeParams - if (settings.debug.value && !sameLength(tparams, args)) + if (settings.debug && !sameLength(tparams, args)) devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args") foreach2(tparams, args) { (tparam1, arg) => @@ -1934,7 +1934,7 @@ trait Types override def kind = "ClassInfoType" override def safeToString = - if (settings.debug.value || decls.size > 1) + if (settings.debug || decls.size > 1) formattedToString else super.safeToString @@ -1943,7 +1943,7 @@ trait Types */ def formattedToString: String = parents.mkString("\n with ") + ( - if (settings.debug.value || parents.isEmpty || (decls.elems ne null)) + if (settings.debug || parents.isEmpty || (decls.elems ne null)) fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}") else "" ) @@ 
-2352,7 +2352,7 @@ trait Types // ensure that symbol is not a local copy with a name coincidence private def needsPreString = ( - settings.debug.value + settings.debug || !shorthands(sym.fullName) || (sym.ownersIterator exists (s => !s.isClass)) ) @@ -2403,12 +2403,12 @@ trait Types "" } override def safeToString = { - val custom = if (settings.debug.value) "" else customToString + val custom = if (settings.debug) "" else customToString if (custom != "") custom else finishPrefix(preString + sym.nameString + argsString) } override def prefixString = "" + ( - if (settings.debug.value) + if (settings.debug) super.prefixString else if (sym.isOmittablePrefix) "" @@ -2722,10 +2722,10 @@ trait Types override def safeToString: String = { def clauses = { val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }") - if (settings.explaintypes.value) "(" + str + ")" else str + if (settings.explaintypes) "(" + str + ")" else str } underlying match { - case TypeRef(pre, sym, args) if !settings.debug.value && isRepresentableWithWildcards => + case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards => "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]") case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => "(" + underlying + ")" + clauses @@ -2956,12 +2956,14 @@ trait Types /** The variable's skolemization level */ val level = skolemizationLevel - /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to - * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`. + /** Applies this TypeVar to type arguments, if arity matches. * - * `constr` for `?CC` only tracks type constructors anyway, - * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]` - * `?CC's` hibounds contains List and Iterable. + * Different applications of the same type constructor variable `?CC`, + * e.g. 
`?CC[Int]` and `?CC[String]`, are modeled as distinct instances of `TypeVar` + * that share a `TypeConstraint`, so that the comparisons `?CC[Int] <:< List[Int]` + * and `?CC[String] <:< Iterable[String]` result in `?CC` being upper-bounded by `List` and `Iterable`. + * + * Applying the wrong number of type args results in a TypeVar whose instance is set to `ErrorType`. */ def applyArgs(newArgs: List[Type]): TypeVar = ( if (newArgs.isEmpty && typeArgs.isEmpty) @@ -2971,7 +2973,7 @@ trait Types TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv) } else - throw new Error("Invalid type application in TypeVar: " + params + ", " + newArgs) + TypeVar(typeSymbol).setInst(ErrorType) ) // newArgs.length may differ from args.length (could've been empty before) // @@ -3001,16 +3003,17 @@ trait Types // <region name="constraint mutators + undoLog"> // invariant: before mutating constr, save old state in undoLog // (undoLog is used to reset constraints to avoid piling up unrelated ones) - def setInst(tp: Type) { + def setInst(tp: Type): this.type = { if (tp eq this) { log(s"TypeVar cycle: called setInst passing $this to itself.") - return + return this } undoLog record this // if we were compared against later typeskolems, repack the existential, // because skolems are only compatible if they were created at the same level val res = if (shouldRepackType) repackExistential(tp) else tp constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res) + this } def addLoBound(tp: Type, isNumericBound: Boolean = false) { @@ -3228,7 +3231,7 @@ trait Types if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#" } - private def levelString = if (settings.explaintypes.value) level else "" + private def levelString = if (settings.explaintypes) level else "" override def safeToString = ( if ((constr eq null) 
|| (constr.inst eq null)) "TVar<" + originName + "=null>" else if (constr.inst ne NoType) "=?" + constr.inst @@ -3760,10 +3763,11 @@ trait Types if (tp.isTrivial) tp else if (tp.prefix.typeSymbol isNonBottomSubClass owner) { val widened = tp match { - case _: ConstantType => tp // Java enum constants: don't widen to the enum type! - case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect. + case _: ConstantType => tp // Java enum constants: don't widen to the enum type! + case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect. } - widened asSeenFrom (pre, tp.typeSymbol.owner) + val memType = widened asSeenFrom (pre, tp.typeSymbol.owner) + if (tp eq widened) memType else memType.narrow } else loop(tp.prefix) memberType tp.typeSymbol @@ -3791,7 +3795,7 @@ trait Types * the maximum depth `bd` of all types in the base type sequences of these types. */ private def lubDepthAdjust(td: Int, bd: Int): Int = - if (settings.XfullLubs.value) bd + if (settings.XfullLubs) bd else if (bd <= 3) bd else if (bd <= 5) td max (bd - 1) else if (bd <= 7) td max (bd - 2) @@ -3883,9 +3887,14 @@ trait Types } } - def normalizePlus(tp: Type) = + def normalizePlus(tp: Type) = ( if (isRawType(tp)) rawToExistential(tp) - else tp.normalize + else tp.normalize match { + // Unify the two representations of module classes + case st @ SingleType(_, sym) if sym.isModule => st.underlying.normalize + case _ => tp.normalize + } + ) /* todo: change to: @@ -4407,7 +4416,7 @@ trait Types /** An exception for cyclic references from which we can recover */ case class RecoverableCyclicReference(sym: Symbol) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug.value) printStackTrace() + if (settings.debug) printStackTrace() } class NoCommonType(tps: List[Type]) extends Throwable( @@ -4433,12 +4442,12 @@ trait Types /** If option `explaintypes` is set, print a subtype trace for 
`found <:< required`. */ def explainTypes(found: Type, required: Type) { - if (settings.explaintypes.value) withTypesExplained(found <:< required) + if (settings.explaintypes) withTypesExplained(found <:< required) } /** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */ def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) { - if (settings.explaintypes.value) withTypesExplained(op(found, required)) + if (settings.explaintypes) withTypesExplained(op(found, required)) } /** Execute `op` while printing a trace of the operations on types executed. */ diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index e7a1ea9311..68f9fc8e83 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -49,3 +49,8 @@ abstract class MutableSettings extends AbsSettings { def XfullLubs: BooleanSetting def breakCycles: BooleanSetting } +object MutableSettings { + import scala.language.implicitConversions + /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */ + @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value +} diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 5bdc5f8a73..921d2e3d66 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -366,7 +366,7 @@ private[internal] trait GlbLubs { // parameters are not handled correctly. val ok = ts forall { t => isSubType(t, lubRefined, depth) || { - if (settings.debug.value || printLubs) { + if (settings.debug || printLubs) { Console.println( "Malformed lub: " + lubRefined + "\n" + "Argument " + t + " does not conform. 
Falling back to " + lubBase diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index e9d3ffbf56..da8e64ea16 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -52,14 +52,17 @@ trait TypeComparers { private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) = if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) { - if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2") + if (settings.debug) println(s"new isSubPre $sym: $pre1 <:< $pre2") true } else false - private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = - if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2 - else (sym1.name == sym2.name) && isUnifiable(pre1, pre2) + private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = ( + if (sym1 == sym2) + sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2 + else + (sym1.name == sym2.name) && isUnifiable(pre1, pre2) + ) def isDifferentType(tp1: Type, tp2: Type): Boolean = try { @@ -126,7 +129,13 @@ trait TypeComparers { tp2.typeSymbol.isPackageClass else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec tp1.typeSymbol.isPackageClass + else if (tp1.isInstanceOf[AnnotatedType] || tp2.isInstanceOf[AnnotatedType]) + annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && (tp1.withoutAnnotations =:= tp2.withoutAnnotations) else { + // We flush out any AnnotatedTypes before calling isSameType2 because + // unlike most other subclasses of Type, we have to allow for equivalence of any + // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this because the + // logic of "annotationsConform" is arbitrary and unknown. 
isSameType2(tp1, tp2) || { val tp1n = normalizePlus(tp1) val tp2n = normalizePlus(tp2) @@ -135,165 +144,99 @@ trait TypeComparers { } } - def isSameType2(tp1: Type, tp2: Type): Boolean = { - tp1 match { - case tr1: TypeRef => - tp2 match { - case tr2: TypeRef => - return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) && - ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) || - isSameTypes(tr1.args, tr2.args))) || - ((tr1.pre, tr2.pre) match { - case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2) - case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1) - case _ => false - }) - case _: SingleType => - return isSameType2(tp2, tp1) // put singleton type on the left, caught below - case _ => - } - case tt1: ThisType => - tp2 match { - case tt2: ThisType => - if (tt1.sym == tt2.sym) return true - case _ => - } - case st1: SingleType => - tp2 match { - case st2: SingleType => - if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true - case TypeRef(pre2, sym2, Nil) => - if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true - case _ => - } - case ct1: ConstantType => - tp2 match { - case ct2: ConstantType => - return (ct1.value == ct2.value) - case _ => - } - case rt1: RefinedType => - tp2 match { - case rt2: RefinedType => // - def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall { - sym2 => - var e1 = s1.lookupEntry(sym2.name) - (e1 ne null) && { - val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner) - var isEqual = false - while (!isEqual && (e1 ne null)) { - isEqual = e1.sym.info =:= substSym - e1 = s1.lookupNextEntry(e1) - } - isEqual - } - } - //Console.println("is same? 
" + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG - return isSameTypes(rt1.parents, rt2.parents) && { - val decls1 = rt1.decls - val decls2 = rt2.decls - isSubScope(decls1, decls2) && isSubScope(decls2, decls1) - } - case _ => - } - case mt1: MethodType => - tp2 match { - case mt2: MethodType => - return isSameTypes(mt1.paramTypes, mt2.paramTypes) && - mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) && - mt1.isImplicit == mt2.isImplicit - // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe - case _ => - } - case NullaryMethodType(restpe1) => - tp2 match { - // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType - case NullaryMethodType(restpe2) => - return restpe1 =:= restpe2 - case _ => - } - case PolyType(tparams1, res1) => - tp2 match { - case PolyType(tparams2, res2) => - // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length))) - // @M looks like it might suffer from same problem as #2210 - return ( - (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate - (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && - res1 =:= res2.substSym(tparams2, tparams1) - ) - case _ => - } - case ExistentialType(tparams1, res1) => - tp2 match { - case ExistentialType(tparams2, res2) => - // @M looks like it might suffer from same problem as #2210 - return ( - // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956 - sameLength(tparams1, tparams2) && - (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && - res1 =:= res2.substSym(tparams2, tparams1) - ) - case _ => - } - case TypeBounds(lo1, hi1) => - tp2 match { - case TypeBounds(lo2, hi2) => - return lo1 =:= lo2 && hi1 =:= hi2 - case _ => - } - case 
BoundedWildcardType(bounds) => - return bounds containsType tp2 - case _ => - } - tp2 match { - case BoundedWildcardType(bounds) => - return bounds containsType tp1 - case _ => - } - tp1 match { - case tv @ TypeVar(_,_) => - return tv.registerTypeEquality(tp2, typeVarLHS = true) - case _ => - } - tp2 match { - case tv @ TypeVar(_,_) => - return tv.registerTypeEquality(tp1, typeVarLHS = false) - case _ => + private def isSameHKTypes(tp1: Type, tp2: Type) = ( + tp1.isHigherKinded + && tp2.isHigherKinded + && (tp1.normalize =:= tp2.normalize) + ) + private def isSameTypeRef(tr1: TypeRef, tr2: TypeRef) = ( + equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) + && (isSameHKTypes(tr1, tr2) || isSameTypes(tr1.args, tr2.args)) + ) + + private def isSameSingletonType(tp1: SingletonType, tp2: SingletonType): Boolean = { + // We don't use dealiasWiden here because we are looking for the SAME type, + // and widening leads to a less specific type. The logic is along the lines of + // dealiasAndFollowUnderlyingAsLongAsTheTypeIsEquivalent. This method is only + // called after a surface comparison has failed, so if chaseDealiasedUnderlying + // does not produce a type other than tp1 and tp2, return false. 
+ @tailrec def chaseDealiasedUnderlying(tp: Type): Type = tp.underlying.dealias match { + case next: SingletonType if tp ne next => chaseDealiasedUnderlying(next) + case _ => tp } - tp1 match { - case _: AnnotatedType => - return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations - case _ => + val origin1 = chaseDealiasedUnderlying(tp1) + val origin2 = chaseDealiasedUnderlying(tp2) + ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2) + } + + private def isSameMethodType(mt1: MethodType, mt2: MethodType) = ( + isSameTypes(mt1.paramTypes, mt2.paramTypes) + && (mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params)) + && (mt1.isImplicit == mt2.isImplicit) + ) + + private def equalTypeParamsAndResult(tparams1: List[Symbol], res1: Type, tparams2: List[Symbol], res2: Type) = { + def subst(info: Type) = info.substSym(tparams2, tparams1) + // corresponds does not check length of two sequences before checking the predicate, + // but SubstMap assumes it has been checked (SI-2956) + ( sameLength(tparams1, tparams2) + && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info)) + && (res1 =:= subst(res2)) + ) + } + + def isSameType2(tp1: Type, tp2: Type): Boolean = { + /** Here we highlight those unfortunate type-like constructs which + * are hidden bundles of mutable state, cruising the type system picking + * up any type constraints naive enough to get into their hot rods. 
+ */ + def mutateNonTypeConstructs(lhs: Type, rhs: Type) = lhs match { + case BoundedWildcardType(bounds) => bounds containsType rhs + case tv @ TypeVar(_, _) => tv.registerTypeEquality(rhs, typeVarLHS = lhs eq tp1) + case TypeRef(tv @ TypeVar(_, _), sym, _) => tv.registerTypeSelection(sym, rhs) + case _ => false } - tp2 match { - case _: AnnotatedType => - return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations - case _ => + /* SingletonType receives this additional scrutiny because there are + * a variety of Types which must be treated as equivalent even if they + * arrive in different guises. For instance, object Foo in the following + * might appear in (at least) the four given below. + * + * package pkg { object Foo ; type Bar = Foo.type } + * + * ModuleClassTypeRef(pkg.type, Foo: ModuleClassSymbol, Nil) + * ThisType(Foo: ModuleClassSymbol) + * SingleType(pkg.type, Foo: ModuleSymbol) + * AliasTypeRef(NoPrefix, sym: AliasSymbol, Nil) where sym.info is one of the above + */ + def sameSingletonType = tp1 match { + case tp1: SingletonType => tp2 match { + case tp2: SingletonType => isSameSingletonType(tp1, tp2) + case _ => false + } + case _ => false } - tp1 match { - case _: SingletonType => - tp2 match { - case _: SingletonType => - def chaseDealiasedUnderlying(tp: Type): Type = { - var origin = tp - var next = origin.underlying.dealias - while (next.isInstanceOf[SingletonType]) { - assert(origin ne next, origin) - origin = next - next = origin.underlying.dealias - } - origin - } - val origin1 = chaseDealiasedUnderlying(tp1) - val origin2 = chaseDealiasedUnderlying(tp2) - ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2) - case _ => - false - } - case _ => - false + /** Those false cases certainly are ugly. There's a proposed SIP to deuglify it. 
+ * https://docs.google.com/a/improving.org/document/d/1onPrzSqyDpHScc9PS_hpxJwa3FlPtthxw-bAuuEe8uA + */ + def sameTypeAndSameCaseClass = tp1 match { + case tp1: TypeRef => tp2 match { case tp2: TypeRef => isSameTypeRef(tp1, tp2) ; case _ => false } + case tp1: MethodType => tp2 match { case tp2: MethodType => isSameMethodType(tp1, tp2) ; case _ => false } + case RefinedType(ps1, decls1) => tp2 match { case RefinedType(ps2, decls2) => isSameTypes(ps1, ps2) && (decls1 isSameScope decls2) ; case _ => false } + case SingleType(pre1, sym1) => tp2 match { case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1, pre1, sym2, pre2) ; case _ => false } + case PolyType(ps1, res1) => tp2 match { case PolyType(ps2, res2) => equalTypeParamsAndResult(ps1, res1, ps2, res2) ; case _ => false } + case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2) ; case _ => false } + case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 == sym2 ; case _ => false } + case ConstantType(c1) => tp2 match { case ConstantType(c2) => c1 == c2 ; case _ => false } + case NullaryMethodType(res1) => tp2 match { case NullaryMethodType(res2) => res1 =:= res2 ; case _ => false } + case TypeBounds(lo1, hi1) => tp2 match { case TypeBounds(lo2, hi2) => lo1 =:= lo2 && hi1 =:= hi2 ; case _ => false } + case _ => false } + + ( sameTypeAndSameCaseClass + || sameSingletonType + || mutateNonTypeConstructs(tp1, tp2) + || mutateNonTypeConstructs(tp2, tp1) + ) } def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index a002b01f70..b0feb0a7fb 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -66,7 +66,7 @@ private[internal] trait TypeConstraints { def clear() { lock() try { - if 
(settings.debug.value) + if (settings.debug) self.log("Clearing " + log.size + " entries from the undoLog.") log = Nil } finally unlock() diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala index 263b0f5a3e..c86383e9e3 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -14,7 +14,7 @@ private[internal] trait TypeToStrings { protected def typeToString(tpe: Type): String = if (tostringRecursions >= maxTostringRecursions) { devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) - if (settings.debug.value) + if (settings.debug) (new Throwable).printStackTrace "..." diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index 32d3171b26..1f7638a621 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -25,7 +25,14 @@ trait UnCurry { val tp = expandAlias(tp0) tp match { case MethodType(params, MethodType(params1, restpe)) => - apply(MethodType(params ::: params1, restpe)) + // This transformation is described in UnCurryTransformer.dependentParamTypeErasure + val packSymbolsMap = new TypeMap { + // Wrapping in a TypeMap to reuse the code that opts for a fast path if the function is an identity. 
+ def apply(tp: Type): Type = packSymbols(params, tp) + } + val existentiallyAbstractedParam1s = packSymbolsMap.mapOver(params1) + val substitutedResult = restpe.substSym(params1, existentiallyAbstractedParam1s) + apply(MethodType(params ::: existentiallyAbstractedParam1s, substitutedResult)) case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) => abort("unexpected curried method types with intervening existential") case MethodType(h :: t, restpe) if h.isImplicit => diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 63b7f73386..d6fca9d186 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -76,6 +76,19 @@ trait Collections { lb.toList } + final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = { + val buf = new ListBuffer[A] + val seen = mutable.Set[B]() + xs foreach { x => + val y = f(x) + if (!seen(y)) { + buf += x + seen += y + } + } + buf.toList + } + @tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = { xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail) } diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 8b69efc749..4ac56da628 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -11,6 +11,7 @@ import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, Buffe import java.io.{ File => JFile } import java.net.URL import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.util.Statistics /** * An abstraction over files for use in the reflection/compiler libraries. @@ -112,7 +113,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] { def underlyingSource: Option[AbstractFile] = None /** Does this abstract file denote an existing file? 
*/ - def exists: Boolean = (file eq null) || file.exists + def exists: Boolean = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + (file eq null) || file.exists + } /** Does this abstract file represent something which can contain classfiles? */ def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip")) diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala new file mode 100644 index 0000000000..64e1e952cd --- /dev/null +++ b/src/reflect/scala/reflect/io/IOStats.scala @@ -0,0 +1,31 @@ +package scala.reflect.io + +import scala.reflect.internal.util.Statistics + +// Due to limitations in the Statistics machinery, these are only +// reported if this patch is applied. +// +// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala +// @@ -109,7 +109,7 @@ quant) +// * Quantities with non-empty prefix are printed in the statistics info. +// */ +// trait Quantity { +// - if (enabled && prefix.nonEmpty) { +// + if (prefix.nonEmpty) { +// val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" +// qs(key) = this +// } +// @@ -243,7 +243,7 @@ quant) +// * +// * to remove all Statistics code from build +// */ +// - final val canEnable = _enabled +// + final val canEnable = true // _enabled +// +// We can commit this change as the first diff reverts a fix for an IDE memory leak. 
+private[io] object IOStats { + val fileExistsCount = Statistics.newCounter("# File.exists calls") + val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls") + val fileIsFileCount = Statistics.newCounter("# File.isFile calls") +} diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 44fb41a1cd..56d4faed99 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -13,6 +13,7 @@ import java.io.{ File => JFile } import java.net.{ URI, URL } import scala.util.Random.alphanumeric import scala.language.implicitConversions +import scala.reflect.internal.util.Statistics /** An abstraction for filesystem paths. The differences between * Path, File, and Directory are primarily to communicate intent. @@ -57,8 +58,18 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { - if (jfile.isFile) new File(jfile) - else if (jfile.isDirectory) new Directory(jfile) + def isFile = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + jfile.isFile + } + + def isDirectory = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + jfile.isDirectory + } + + if (isFile) new File(jfile) + else if (isDirectory) new Directory(jfile) else new Path(jfile) } catch { case ex: SecurityException => new Path(jfile) } @@ -187,10 +198,19 @@ class Path private[io] (val jfile: JFile) { // Boolean tests def canRead = jfile.canRead() def canWrite = jfile.canWrite() - def exists = try jfile.exists() catch { case ex: SecurityException => false } + def exists = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + try jfile.exists() catch { case ex: SecurityException => false } + } - def isFile = try jfile.isFile() catch { case ex: SecurityException => false } - def isDirectory = try jfile.isDirectory() catch { case ex: SecurityException => false } + def isFile = { + if 
(Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + try jfile.isFile() catch { case ex: SecurityException => false } + } + def isDirectory = { + if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + try jfile.isDirectory() catch { case ex: SecurityException => false } + } def isAbsolute = jfile.isAbsolute() def isEmpty = path.length == 0 diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 31df78f995..b892fe7cef 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -56,8 +56,14 @@ class PlainFile(val givenPath: Path) extends AbstractFile { /** Returns all abstract subfiles of this abstract directory. */ def iterator: Iterator[AbstractFile] = { + // Optimization: Assume that the file was not deleted and did not have permissions changed + // between the call to `list` and the iteration. This saves a call to `exists`. + def existsFast(path: Path) = path match { + case (_: Directory | _: io.File) => true + case _ => path.exists + } if (!isDirectory) Iterator.empty - else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_)) + else givenPath.toDirectory.list filter existsFast map (new PlainFile(_)) } /** diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index e58e89a4b1..3211bb7919 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -6,12 +6,14 @@ import scala.collection.mutable.WeakHashMap import java.lang.{Class => jClass, Package => jPackage} import java.lang.reflect.{ - Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField, + Method => jMethod, Constructor => jConstructor, Field => jField, Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray, + AccessibleObject => jAccessibleObject, 
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement } import java.lang.annotation.{Annotation => jAnnotation} import java.io.IOException -import internal.MissingRequirementError +import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags, JMethodOrConstructor } +import JavaAccFlags._ import internal.pickling.ByteCodecs import internal.ClassfileConstants._ import internal.pickling.UnPickler @@ -88,12 +90,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni // ----------- Caching ------------------------------------------------------------------ - private val classCache = new TwoWayCache[jClass[_], ClassSymbol] - private val packageCache = new TwoWayCache[Package, ModuleSymbol] - private val methodCache = new TwoWayCache[jMethod, MethodSymbol] + private val classCache = new TwoWayCache[jClass[_], ClassSymbol] + private val packageCache = new TwoWayCache[Package, ModuleSymbol] + private val methodCache = new TwoWayCache[jMethod, MethodSymbol] private val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol] - private val fieldCache = new TwoWayCache[jField, TermSymbol] - private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] + private val fieldCache = new TwoWayCache[jField, TermSymbol] + private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S = cache.toScala(key){ @@ -101,38 +103,36 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni body(mirrorDefining(jclazz), key) } - private implicit val classHasJavaClass: HasJavaClass[jClass[_]] = - new HasJavaClass(identity) - private implicit val methHasJavaClass: HasJavaClass[jMethod] - = new HasJavaClass(_.getDeclaringClass) - private implicit val fieldHasJavaClass: HasJavaClass[jField] = - new 
HasJavaClass(_.getDeclaringClass) - private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] = - new HasJavaClass(_.getDeclaringClass) + private implicit val classHasJavaClass: HasJavaClass[jClass[_]] = new HasJavaClass(identity) + private implicit val methHasJavaClass: HasJavaClass[jMethod] = new HasJavaClass(_.getDeclaringClass) + private implicit val fieldHasJavaClass: HasJavaClass[jField] = new HasJavaClass(_.getDeclaringClass) + private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] = new HasJavaClass(_.getDeclaringClass) private implicit val tparamHasJavaClass: HasJavaClass[jTypeVariable[_ <: GenericDeclaration]] = new HasJavaClass ( (tparam: jTypeVariable[_ <: GenericDeclaration]) => { tparam.getGenericDeclaration match { - case jclazz: jClass[_] => jclazz - case jmeth: jMethod => jmeth.getDeclaringClass + case jclazz: jClass[_] => jclazz + case jmeth: jMethod => jmeth.getDeclaringClass case jconstr: jConstructor[_] => jconstr.getDeclaringClass } }) // ----------- Implementations of mirror operations and classes ------------------- - private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror") - private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror") - private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror") - private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror") - private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}") - private def ErrorNotField(sym: Symbol) = 
throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym") - private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException( + private def abort(msg: String) = throw new ScalaReflectionException(msg) + + private def ErrorInnerClass(sym: Symbol) = abort(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror") + private def ErrorInnerModule(sym: Symbol) = abort(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror") + private def ErrorStaticClass(sym: Symbol) = abort(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror") + private def ErrorStaticModule(sym: Symbol) = abort(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror") + private def ErrorNotMember(sym: Symbol, owner: Symbol) = abort(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}") + private def ErrorNotField(sym: Symbol) = abort(s"expected a field or an accessor method symbol, you provided $sym") + private def ErrorSetImmutableField(sym: Symbol) = abort(s"cannot set an immutable field ${sym.name}") + private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = abort(s"expected a constructor of $owner, you provided $sym") + private def ErrorFree(member: Symbol, freeType: Symbol) = abort(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}") + private def ErrorNonExistentField(sym: Symbol) = abort( sm"""Scala field ${sym.name} isn't represented as a Java field, neither it has a Java accessor method |note that private parameters of class constructors don't get mapped onto fields and/or accessors, |unless they are used outside of their declaring constructors.""") - private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}") - private def 
ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym") - private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}") /** Helper functions for extracting typed values from a (Class[_], Any) * representing an annotation argument. @@ -210,7 +210,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni case _ => body } } - private def checkMemberOf(sym: Symbol, owner: ClassSymbol) { if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) { // do nothing @@ -236,8 +235,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni if (staticClazz.isPrimitive) staticClazz else dynamicClazz } - private class JavaInstanceMirror[T: ClassTag](val instance: T) - extends InstanceMirror { + private class JavaInstanceMirror[T: ClassTag](val instance: T) extends InstanceMirror { def symbol = thisMirror.classSymbol(preciseClass(instance)) def reflectField(field: TermSymbol): FieldMirror = { checkMemberOf(field, symbol) @@ -269,12 +267,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol) extends FieldMirror { - lazy val jfield = { - val jfield = fieldToJava(symbol) - if (!jfield.isAccessible) jfield.setAccessible(true) - jfield - } - def get = jfield.get(receiver) + lazy val jfield = ensureAccessible(fieldToJava(symbol)) + def get = jfield get receiver def set(value: Any) = { if (!symbol.isMutable) ErrorSetImmutableField(symbol) jfield.set(receiver, value) @@ -338,15 +332,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni } } } - - private abstract class JavaMethodMirror(val symbol: MethodSymbol) - extends MethodMirror { - lazy val jmeth = { 
- val jmeth = methodToJava(symbol) - if (!jmeth.isAccessible) jmeth.setAccessible(true) - jmeth - } - + private abstract class JavaMethodMirror(val symbol: MethodSymbol) extends MethodMirror { + lazy val jmeth = ensureAccessible(methodToJava(symbol)) def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*) def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = { @@ -418,13 +405,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni if (!perfectMatch && !varargMatch) { val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}" val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments" - throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments") + abort(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments") } def objReceiver = receiver.asInstanceOf[AnyRef] def objArg0 = args(0).asInstanceOf[AnyRef] def objArgs = args.asInstanceOf[Seq[AnyRef]] - def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors") + def fail(msg: String) = abort(msg + ", it cannot be invoked with mirrors") def invokePrimitiveMethod = { val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString) @@ -464,14 +451,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni extends MethodMirror { def bind(newReceiver: Any) = new JavaConstructorMirror(newReceiver.asInstanceOf[AnyRef], symbol) override val receiver = outer - lazy val jconstr = { - val jconstr = constructorToJava(symbol) - if (!jconstr.isAccessible) jconstr.setAccessible(true) - jconstr - } + lazy val jconstr = ensureAccessible(constructorToJava(symbol)) def apply(args: Any*): Any = { if (symbol.owner == ArrayClass) - throw new ScalaReflectionException("Cannot instantiate arrays 
with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead") + abort("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead") val effectiveArgs = if (outer == null) args.asInstanceOf[Seq[AnyRef]] @@ -532,7 +515,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni } def javaClass(path: String): jClass[_] = - Class.forName(path, true, classLoader) + jClass.forName(path, true, classLoader) /** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */ def tryJavaClass(path: String): Option[jClass[_]] = ( @@ -562,7 +545,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe) def handleError(ex: Exception) = { markAbsent(ErrorType) - if (settings.debug.value) ex.printStackTrace() + if (settings.debug) ex.printStackTrace() val msg = ex.getMessage() MissingRequirementError.signal( (if (msg eq null) "reflection error while loading " + clazz.name @@ -655,13 +638,22 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList // SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser val jexTpes = jann match { - case jm: jMethod => jm.getExceptionTypes.toList + case jm: jMethod => jm.getExceptionTypes.toList case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList - case _ => Nil + case _ => Nil } jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe))) } + private implicit class jClassOps(val clazz: jClass[_]) { + def javaFlags: JavaAccFlags = JavaAccFlags(clazz) + def scalaFlags: Long = javaFlags.toScalaFlags + } + private implicit class jMemberOps(val member: jMember) { + def javaFlags: JavaAccFlags = JavaAccFlags(member) + def scalaFlags: 
Long = javaFlags.toScalaFlags + } + /** * A completer that fills in the types of a Scala class and its companion object * by copying corresponding type info from a Java class. This completer is used @@ -681,14 +673,14 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni override def load(sym: Symbol): Unit = { debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym) - val flags = toScalaClassFlags(jclazz.getModifiers) + val flags = jclazz.scalaFlags clazz setFlag (flags | JAVA) if (module != NoSymbol) { module setFlag (flags & PRIVATE | JAVA) module.moduleClass setFlag (flags & PRIVATE | JAVA) } - relatedSymbols foreach (importPrivateWithinFromJavaFlags(_, jclazz.getModifiers)) + propagatePackageBoundary(jclazz, relatedSymbols: _*) copyAnnotations(clazz, jclazz) // to do: annotations to set also for module? @@ -720,28 +712,21 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass) } - def enter(sym: Symbol, mods: Int) = - (if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym + def enter(sym: Symbol, mods: JavaAccFlags) = + ( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym for (jinner <- jclazz.getDeclaredClasses) jclassAsScala(jinner) // inner class is entered as a side-effect // no need to call enter explicitly - pendingLoadActions = { () => - - for (jfield <- jclazz.getDeclaredFields) - enter(jfieldAsScala(jfield), jfield.getModifiers) - - for (jmeth <- jclazz.getDeclaredMethods) - enter(jmethodAsScala(jmeth), jmeth.getModifiers) - - for (jconstr <- jclazz.getConstructors) - enter(jconstrAsScala(jconstr), jconstr.getModifiers) - - } :: pendingLoadActions + pendingLoadActions ::= { () => + jclazz.getDeclaredFields foreach (f => enter(jfieldAsScala(f), 
f.javaFlags)) + jclazz.getDeclaredMethods foreach (m => enter(jmethodAsScala(m), m.javaFlags)) + jclazz.getConstructors foreach (c => enter(jconstrAsScala(c), c.javaFlags)) + } if (parentsLevel == 0) { - while (!pendingLoadActions.isEmpty) { + while (pendingLoadActions.nonEmpty) { val item = pendingLoadActions.head pendingLoadActions = pendingLoadActions.tail item() @@ -760,8 +745,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * If Java modifiers `mods` contain STATIC, return the module class * of the companion module of `clazz`, otherwise the class `clazz` itself. */ - private def followStatic(clazz: Symbol, mods: Int) = - if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz + private def followStatic(clazz: Symbol, mods: JavaAccFlags) = + if (mods.isStatic) clazz.companionModule.moduleClass else clazz /** Methods which need to be treated with care * because they either are getSimpleName or call getSimpleName: @@ -797,7 +782,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni if (jclazz.isMemberClass) { val jEnclosingClass = jclazz.getEnclosingClass val sEnclosingClass = classToScala(jEnclosingClass) - followStatic(sEnclosingClass, jclazz.getModifiers) + followStatic(sEnclosingClass, jclazz.javaFlags) } else if (jclazz.isLocalClass0) { val jEnclosingMethod = jclazz.getEnclosingMethod if (jEnclosingMethod != null) { @@ -825,7 +810,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * The Scala owner of the Scala symbol corresponding to the Java member `jmember` */ private def sOwner(jmember: jMember): Symbol = { - followStatic(classToScala(jmember.getDeclaringClass), jmember.getModifiers) + followStatic(classToScala(jmember.getDeclaringClass), jmember.javaFlags) } /** @@ -866,7 +851,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def methodToScala1(jmeth: jMethod): MethodSymbol = { val jOwner 
= jmeth.getDeclaringClass val preOwner = classToScala(jOwner) - val owner = followStatic(preOwner, jmeth.getModifiers) + val owner = followStatic(preOwner, jmeth.javaFlags) (lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth)) .asMethod } @@ -880,7 +865,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni toScala(constructorCache, jconstr)(_ constructorToScala1 _) private def constructorToScala1(jconstr: jConstructor[_]): MethodSymbol = { - val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.getModifiers) + val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.javaFlags) (lookup(owner, jconstr.getName) suchThat (erasesTo(_, jconstr)) orElse jconstrAsScala(jconstr)) .asMethod } @@ -1018,6 +1003,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni case jmeth: jMethod => methodToScala(jmeth) case jconstr: jConstructor[_] => constructorToScala(jconstr) } + def reflectMemberToScala(m: jMember): Symbol = m match { + case x: GenericDeclaration => genericDeclarationToScala(x) + case x: jField => jfieldAsScala(x) + } /** * Given some Java type arguments, a corresponding list of Scala types, plus potentially @@ -1092,10 +1081,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def jfieldAsScala1(jfield: jField): TermSymbol = { val field = sOwner(jfield) - .newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers)) + .newValue(newTermName(jfield.getName), NoPosition, jfield.scalaFlags) .setInfo(typeToScala(jfield.getGenericType)) - fieldCache enter (jfield, field) - importPrivateWithinFromJavaFlags(field, jfield.getModifiers) + + fieldCache.enter(jfield, field) + propagatePackageBoundary(jfield, field) copyAnnotations(field, jfield) field } @@ -1115,16 +1105,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def 
jmethodAsScala1(jmeth: jMethod): MethodSymbol = { val clazz = sOwner(jmeth) - val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, toScalaMethodFlags(jmeth.getModifiers)) + val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags) methodCache enter (jmeth, meth) val tparams = jmeth.getTypeParameters.toList map createTypeParameter val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala val resulttpe = typeToScala(jmeth.getGenericReturnType) setMethType(meth, tparams, paramtpes, resulttpe) - importPrivateWithinFromJavaFlags(meth, jmeth.getModifiers) + propagatePackageBoundary(jmeth.javaFlags, meth) copyAnnotations(meth, jmeth) - if ((jmeth.getModifiers & JAVA_ACC_VARARGS) != 0) meth.setInfo(arrayToRepeated(meth.info)) - meth + + if (jmeth.javaFlags.isVarargs) + meth modifyInfo arrayToRepeated + else + meth } /** @@ -1139,13 +1132,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def jconstrAsScala1(jconstr: jConstructor[_]): MethodSymbol = { // [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out. 
val clazz = sOwner(jconstr) - val constr = clazz.newConstructor(NoPosition, toScalaMethodFlags(jconstr.getModifiers)) + val constr = clazz.newConstructor(NoPosition, jconstr.scalaFlags) constructorCache enter (jconstr, constr) val tparams = jconstr.getTypeParameters.toList map createTypeParameter val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala setMethType(constr, tparams, paramtpes, clazz.tpe_*) constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe)) - importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers) + propagatePackageBoundary(jconstr.javaFlags, constr) copyAnnotations(constr, jconstr) constr } @@ -1170,13 +1163,15 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni else if (clazz.isTopLevel) javaClass(clazz.javaClassName) else if (clazz.owner.isClass) { - val childOfClass = !clazz.owner.isModuleClass - val childOfTopLevel = clazz.owner.isTopLevel + val childOfClass = !clazz.owner.isModuleClass + val childOfTopLevel = clazz.owner.isTopLevel val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel // suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759 var ownerClazz = classToJava(clazz.owner.asClass) - if (childOfTopLevelObject) ownerClazz = Class.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader) + if (childOfTopLevelObject) + ownerClazz = jClass.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader) + val ownerChildren = ownerClazz.getDeclaredClasses var fullNameOfJavaClass = ownerClazz.getName @@ -1241,11 +1236,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * Pre: Scala type is already transformed to Java level. 
*/ def typeToJavaClass(tpe: Type): jClass[_] = tpe match { - case ExistentialType(_, rtpe) => typeToJavaClass(rtpe) - case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe)) - case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass) + case ExistentialType(_, rtpe) => typeToJavaClass(rtpe) + case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe)) + case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass) case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias) - case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found") + case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found") } } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index a130013398..4d90afcdc3 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -14,7 +14,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S lazy val settings = new Settings private val isLogging = sys.props contains "scala.debug.reflect" - def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) + def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) type TreeCopier = InternalTreeCopierOps def newStrictTreeCopier: TreeCopier = new StrictTreeCopier diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index 5c08e9a508..ade7a4a21a 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -11,10 +11,10 @@ import scala.reflect.internal.Flags._ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps { def info(msg: => String) = - if (settings.verbose.value) 
println("[reflect-compiler] "+msg) + if (settings.verbose) println("[reflect-compiler] "+msg) def debugInfo(msg: => String) = - if (settings.debug.value) info(msg) + if (settings.debug) info(msg) /** Declares that this is a runtime reflection universe. * diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala index 7195424cf9..9e87b6ba55 100644 --- a/src/repl/scala/tools/nsc/MainGenericRunner.scala +++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala @@ -17,7 +17,7 @@ object JarRunner extends CommonRunner { val jarURLs = ClassPath expandManifestPath jarPath val urls = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs - if (settings.Ylogcp.value) { + if (settings.Ylogcp) { Console.err.println("Running jar with these URLs as the classpath:") urls foreach println } @@ -46,7 +46,7 @@ class MainGenericRunner { def sampleCompiler = new Global(settings) // def so its not created unless needed if (!command.ok) return errorFn("\n" + command.shortUsageMsg) - else if (settings.version.value) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString)) + else if (settings.version) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString)) else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler) def isE = !settings.execute.isDefault diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 599a061984..df28e428ce 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -628,10 +628,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) * with SimpleReader. 
*/ def chooseReader(settings: Settings): InteractiveReader = { - if (settings.Xnojline.value || Properties.isEmacsShell) + if (settings.Xnojline || Properties.isEmacsShell) SimpleReader() else try new JLineReader( - if (settings.noCompletion.value) NoCompletion + if (settings.noCompletion) NoCompletion else new JLineCompletion(intp) ) catch { diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index d2b6cdd7f0..4ba81b634a 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -397,7 +397,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } private[nsc] def replwarn(msg: => String) { - if (!settings.nowarnings.value) + if (!settings.nowarnings) printMessage(msg) } diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala index e517a16b32..da6d271a68 100644 --- a/src/repl/scala/tools/nsc/interpreter/Power.scala +++ b/src/repl/scala/tools/nsc/interpreter/Power.scala @@ -70,7 +70,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re pass += 1 val (repeats, unseen) = todo partition seen unseenHistory += unseen.size - if (settings.verbose.value) { + if (settings.verbose) { println("%3d %s accumulated, %s discarded. 
This pass: %s unseen, %s repeats".format( pass, keep.size, discarded, unseen.size, repeats.size)) } diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala index b4d2adaad4..4607684c0d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala @@ -126,7 +126,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor } private[doc] def docdbg(msg: String) { - if (settings.Ydocdebug.value) + if (settings.Ydocdebug) println(msg) } } diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index 5ad50445a8..bf6d6ffed7 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -91,7 +91,7 @@ trait ScaladocAnalyzer extends Analyzer { typedStats(trees, NoSymbol) useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - if (settings.debug.value) + if (settings.debug) useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) useCase.defined diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala index 9447e36610..ea45ca1a56 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala @@ -28,7 +28,7 @@ trait Uncompilable { lazy val pairs = files flatMap { f => val comments = docPairs(f.slurp()) - if (settings.verbose.value) + if (settings.verbose) inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", "))) comments @@ -37,7 +37,7 @@ trait Uncompilable { def symbols = pairs map (_._1) def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet def comments = { - if (settings.debug.value || 
settings.verbose.value) + if (settings.debug || settings.verbose) inform("Found %d uncompilable files: %s".format(files.size, files mkString ", ")) if (pairs.isEmpty) diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala index 2a07547de2..ac5fec80b3 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala @@ -73,16 +73,20 @@ object EntityLink { def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link)) } final case class HtmlTag(data: String) extends Inline { - def canClose(open: HtmlTag) = { - open.data.stripPrefix("<") == data.stripPrefix("</") + private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r + private val (isEnd, tagName) = data match { + case Pattern(s1, s2) => + (! s1.isEmpty, Some(s2.toLowerCase)) + case _ => + (false, None) } - def close = { - if (data.indexOf("</") == -1) - Some(HtmlTag("</" + data.stripPrefix("<"))) - else - None + def canClose(open: HtmlTag) = { + isEnd && tagName == open.tagName } + + private val TagsNotToClose = Set("br", "img") + def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") } } /** The summary of a comment, usually its first sentence. There must be exactly one summary per body. 
*/ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index 229e26d699..9edd5afa13 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -60,7 +60,7 @@ abstract class HtmlPage extends Page { thisPage => w.write(xml.Xhtml.toXhtml(html)) } - if (site.universe.settings.docRawOutput.value) + if (site.universe.settings.docRawOutput) writeFile(site, ".raw") { // we're only interested in the body, as this will go into the diff _.write(body.text) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala index 5781e680dd..348ea97c5b 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala @@ -6,6 +6,7 @@ package scala.tools.nsc.doc.html import scala.xml.NodeSeq +import scala.annotation.tailrec /** Highlight the syntax of Scala code appearing in a `{{{` wiki block * (see method `HtmlPage.blockToHtml`). 
@@ -209,9 +210,9 @@ private[html] object SyntaxHigh { out.toString } - def parse(pre: String, i: Int): Int = { + @tailrec def parse(pre: String, i: Int): Unit = { out append pre - if (i == buf.length) return i + if (i == buf.length) return buf(i) match { case '\n' => parse("\n", i+1) @@ -277,7 +278,6 @@ private[html] object SyntaxHigh { } else parse(buf(i).toChar.toString, i+1) } - i } parse("", 0) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala index ec00cace75..ab8e9e2756 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala @@ -42,7 +42,7 @@ object DiagramStats { private[this] var fixedImages = 0 def printStats(settings: Settings) = { - if (settings.docDiagramsDebug.value) { + if (settings.docDiagramsDebug) { settings.printMsg("\nDiagram generation running time breakdown:\n") filterTrack.printStats(settings.printMsg) modelTrack.printStats(settings.printMsg) @@ -63,4 +63,4 @@ object DiagramStats { def addBrokenImage(): Unit = brokenImages += 1 def addFixedImage(): Unit = fixedImages += 1 -}
\ No newline at end of file +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 837d9e6f21..7d146b4a5f 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -329,7 +329,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { NodeSeq.Empty } catch { case exc: Exception => - if (settings.docDiagramsDebug.value) { + if (settings.docDiagramsDebug) { settings.printMsg("\n\n**********************************************************************") settings.printMsg("Encountered an error while generating page for " + template.qualifiedName) settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t","")) diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 895cc84f39..cc228082c1 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -272,7 +272,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { assert(!(docTemplatesCache isDefinedAt sym), sym) docTemplatesCache += (sym -> this) - if (settings.verbose.value) + if (settings.verbose) inform("Creating doc template for " + sym) override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot @@ -350,7 +350,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added // lazily, on completeModel val conversions: List[ImplicitConversionImpl] = - if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil + if (settings.docImplicits) makeImplicitConversions(sym, this) else Nil // members as given by the compiler 
lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList @@ -924,7 +924,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def ownerTpl(sym: Symbol): Symbol = if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) val tpe = - if (thisFactory.settings.useStupidTypes.value) aType else { + if (thisFactory.settings.useStupidTypes) aType else { def ownerTpl(sym: Symbol): Symbol = if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym @@ -1036,7 +1036,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // whether or not to create a page for an {abstract,alias} type def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) = - (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) || + (settings.docExpandAllTypes && (bSym.sourceFile != null)) || (bSym.isAliasType || bSym.isAbstractType) && { val rawComment = global.expandedDocComment(bSym, inTpl.sym) rawComment.contains("@template") || rawComment.contains("@documentable") } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 71b0a00e0a..1f87f935f2 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -241,7 +241,7 @@ trait ModelFactoryImplicitSupport { available match { case Some(true) => Nil - case Some(false) if (!settings.docImplicitsShowAll.value) => + case Some(false) if !settings.docImplicitsShowAll => // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String]) throw new ImplicitNotFound(implType) case _ => diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala 
b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index 99e9059d79..d5048dcfa3 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -140,7 +140,7 @@ trait ModelFactoryTypeSupport { } val prefix = - if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) { + if (!settings.docNoPrefixes && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) { if (!owner.isRefinementClass) { val qName = makeQualifiedName(owner, Some(inTpl.sym)) if (qName != "") qName + "." else "" @@ -308,7 +308,7 @@ trait ModelFactoryTypeSupport { // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the // same type based on the template the type is shown in. - if (settings.docNoPrefixes.value) + if (settings.docNoPrefixes) typeCache.getOrElseUpdate(aType, createTypeEntity) else createTypeEntity } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 6395446d3b..44d8886e4e 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -248,7 +248,7 @@ trait DiagramDirectiveParser { hideNodesFilter = hideNodesFilter0, hideEdgesFilter = hideEdgesFilter0) - if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram) + if (settings.docDiagramsDebug && result != NoDiagramAtAll && result != FullDiagram) settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result) tFilter += System.currentTimeMillis |